From 63e90f94a84a0ac3530c4d9fa304ccf196a5e89a Mon Sep 17 00:00:00 2001 From: TheK0tYaRa Date: Fri, 6 Mar 2026 23:48:46 +0300 Subject: [PATCH] added v1.12.0 sources --- .../.cargo/config.toml | 12 + matrix-authentication-service/.codecov.yml | 10 + .../.config/nextest.toml | 7 + matrix-authentication-service/.dockerignore | 17 + matrix-authentication-service/.editorconfig | 16 + .../.github/CODEOWNERS | 1 + .../.github/actions/build-frontend/action.yml | 25 + .../.github/actions/build-policies/action.yml | 21 + .../.github/dependabot.yml | 113 + .../.github/release.yml | 45 + .../.github/scripts/.gitignore | 7 + .../.github/scripts/cleanup-pr.cjs | 44 + .../.github/scripts/commit-and-tag.cjs | 66 + .../.github/scripts/create-release-branch.cjs | 22 + .../.github/scripts/create-version-tag.cjs | 24 + .../.github/scripts/merge-back.cjs | 60 + .../.github/scripts/package.json | 7 + .../.github/scripts/update-release-branch.cjs | 22 + .../.github/scripts/update-unstable-tag.cjs | 21 + .../.github/workflows/build.yaml | 469 + .../.github/workflows/ci.yaml | 338 + .../.github/workflows/coverage.yaml | 139 + .../.github/workflows/docs.yaml | 77 + .../.github/workflows/merge-back.yaml | 40 + .../.github/workflows/release-branch.yaml | 123 + .../.github/workflows/release-bump.yaml | 93 + .../.github/workflows/tag.yaml | 71 + .../workflows/translations-download.yaml | 63 + .../workflows/translations-upload.yaml | 41 + matrix-authentication-service/.gitignore | 14 + matrix-authentication-service/.rustfmt.toml | 11 + matrix-authentication-service/CONTRIBUTING.md | 5 + matrix-authentication-service/Cargo.lock | 7919 ++++ matrix-authentication-service/Cargo.toml | 762 + matrix-authentication-service/Dockerfile | 169 + matrix-authentication-service/LICENSE | 661 + .../LICENSE-COMMERCIAL | 6 + matrix-authentication-service/README.md | 61 + matrix-authentication-service/biome.json | 60 + matrix-authentication-service/book.toml | 26 + matrix-authentication-service/clippy.toml | 22 + 
.../crates/axum-utils/Cargo.toml | 48 + .../axum-utils/src/client_authorization.rs | 739 + .../crates/axum-utils/src/cookies.rs | 169 + .../crates/axum-utils/src/csrf.rs | 165 + .../crates/axum-utils/src/error_wrapper.rs | 23 + .../crates/axum-utils/src/fancy_error.rs | 105 + .../crates/axum-utils/src/jwt.rs | 21 + .../axum-utils/src/language_detection.rs | 280 + .../crates/axum-utils/src/lib.rs | 27 + .../crates/axum-utils/src/sentry.rs | 65 + .../crates/axum-utils/src/session.rs | 101 + .../axum-utils/src/user_authorization.rs | 338 + .../crates/cli/Cargo.toml | 103 + .../crates/cli/build.rs | 36 + .../crates/cli/src/app_state.rs | 374 + .../crates/cli/src/commands/config.rs | 151 + .../crates/cli/src/commands/database.rs | 43 + .../crates/cli/src/commands/debug.rs | 70 + .../crates/cli/src/commands/doctor.rs | 410 + .../crates/cli/src/commands/manage.rs | 1244 + .../crates/cli/src/commands/mod.rs | 109 + .../crates/cli/src/commands/server.rs | 341 + .../crates/cli/src/commands/syn2mas.rs | 319 + .../crates/cli/src/commands/templates.rs | 145 + .../crates/cli/src/commands/worker.rs | 93 + .../crates/cli/src/lifecycle.rs | 239 + .../crates/cli/src/main.rs | 181 + .../crates/cli/src/server.rs | 429 + .../crates/cli/src/sync.rs | 430 + .../crates/cli/src/telemetry.rs | 293 + .../crates/cli/src/util.rs | 592 + .../crates/config/Cargo.toml | 53 + .../crates/config/src/bin/schema.rs | 14 + .../crates/config/src/lib.rs | 24 + .../crates/config/src/schema.rs | 27 + .../crates/config/src/sections/account.rs | 125 + .../crates/config/src/sections/branding.rs | 55 + .../crates/config/src/sections/captcha.rs | 83 + .../crates/config/src/sections/clients.rs | 353 + .../crates/config/src/sections/database.rs | 319 + .../crates/config/src/sections/email.rs | 280 + .../config/src/sections/experimental.rs | 126 + .../crates/config/src/sections/http.rs | 473 + .../crates/config/src/sections/matrix.rs | 235 + .../crates/config/src/sections/mod.rs | 386 + 
.../crates/config/src/sections/passwords.rs | 230 + .../crates/config/src/sections/policy.rs | 178 + .../config/src/sections/rate_limiting.rs | 298 + .../crates/config/src/sections/secrets.rs | 709 + .../crates/config/src/sections/telemetry.rs | 221 + .../crates/config/src/sections/templates.rs | 116 + .../config/src/sections/upstream_oauth2.rs | 803 + .../crates/config/src/util.rs | 76 + .../crates/context/Cargo.toml | 29 + .../crates/context/src/fmt.rs | 143 + .../crates/context/src/future.rs | 59 + .../crates/context/src/layer.rs | 41 + .../crates/context/src/lib.rs | 152 + .../crates/context/src/service.rs | 54 + .../crates/data-model/Cargo.toml | 38 + .../crates/data-model/examples/ua-parser.rs | 18 + .../crates/data-model/src/clock.rs | 119 + .../crates/data-model/src/compat/device.rs | 119 + .../crates/data-model/src/compat/mod.rs | 108 + .../crates/data-model/src/compat/session.rs | 106 + .../crates/data-model/src/compat/sso_login.rs | 204 + .../crates/data-model/src/lib.rs | 65 + .../src/oauth2/authorization_grant.rs | 366 + .../crates/data-model/src/oauth2/client.rs | 286 + .../src/oauth2/device_code_grant.rs | 262 + .../crates/data-model/src/oauth2/mod.rs | 19 + .../crates/data-model/src/oauth2/session.rs | 111 + .../crates/data-model/src/personal/mod.rs | 32 + .../crates/data-model/src/personal/session.rs | 141 + .../crates/data-model/src/policy_data.rs | 15 + .../crates/data-model/src/site_config.rs | 114 + .../crates/data-model/src/tokens.rs | 492 + .../data-model/src/upstream_oauth2/link.rs | 19 + .../data-model/src/upstream_oauth2/mod.rs | 26 + .../src/upstream_oauth2/provider.rs | 435 + .../data-model/src/upstream_oauth2/session.rs | 338 + .../crates/data-model/src/user_agent.rs | 225 + .../crates/data-model/src/users.rs | 287 + .../crates/data-model/src/utils.rs | 13 + .../crates/data-model/src/version.rs | 8 + .../crates/email/Cargo.toml | 25 + .../crates/email/src/lib.rs | 22 + .../crates/email/src/mailer.rs | 162 + 
.../crates/email/src/transport.rs | 147 + .../crates/handlers/Cargo.toml | 97 + .../handlers/src/activity_tracker/bound.rs | 62 + .../handlers/src/activity_tracker/mod.rs | 239 + .../handlers/src/activity_tracker/worker.rs | 269 + .../crates/handlers/src/admin/call_context.rs | 319 + .../crates/handlers/src/admin/mod.rs | 251 + .../crates/handlers/src/admin/model.rs | 956 + .../crates/handlers/src/admin/params.rs | 173 + .../crates/handlers/src/admin/response.rs | 296 + .../crates/handlers/src/admin/schema.rs | 59 + .../src/admin/v1/compat_sessions/finish.rs | 243 + .../src/admin/v1/compat_sessions/get.rs | 163 + .../src/admin/v1/compat_sessions/list.rs | 653 + .../src/admin/v1/compat_sessions/mod.rs | 14 + .../crates/handlers/src/admin/v1/mod.rs | 262 + .../src/admin/v1/oauth2_sessions/finish.rs | 234 + .../src/admin/v1/oauth2_sessions/get.rs | 154 + .../src/admin/v1/oauth2_sessions/list.rs | 454 + .../src/admin/v1/oauth2_sessions/mod.rs | 15 + .../src/admin/v1/personal_sessions/add.rs | 311 + .../src/admin/v1/personal_sessions/get.rs | 189 + .../src/admin/v1/personal_sessions/list.rs | 585 + .../src/admin/v1/personal_sessions/mod.rs | 39 + .../admin/v1/personal_sessions/regenerate.rs | 246 + .../src/admin/v1/personal_sessions/revoke.rs | 250 + .../handlers/src/admin/v1/policy_data/get.rs | 156 + .../src/admin/v1/policy_data/get_latest.rs | 152 + .../handlers/src/admin/v1/policy_data/mod.rs | 14 + .../handlers/src/admin/v1/policy_data/set.rs | 155 + .../handlers/src/admin/v1/site_config.rs | 97 + .../src/admin/v1/upstream_oauth_links/add.rs | 465 + .../admin/v1/upstream_oauth_links/delete.rs | 180 + .../src/admin/v1/upstream_oauth_links/get.rs | 173 + .../src/admin/v1/upstream_oauth_links/list.rs | 739 + .../src/admin/v1/upstream_oauth_links/mod.rs | 56 + .../admin/v1/upstream_oauth_providers/get.rs | 196 + .../admin/v1/upstream_oauth_providers/list.rs | 799 + .../admin/v1/upstream_oauth_providers/mod.rs | 12 + .../handlers/src/admin/v1/user_emails/add.rs | 324 + 
.../src/admin/v1/user_emails/delete.rs | 141 + .../handlers/src/admin/v1/user_emails/get.rs | 152 + .../handlers/src/admin/v1/user_emails/list.rs | 493 + .../handlers/src/admin/v1/user_emails/mod.rs | 16 + .../admin/v1/user_registration_tokens/add.rs | 262 + .../admin/v1/user_registration_tokens/get.rs | 175 + .../admin/v1/user_registration_tokens/list.rs | 1585 + .../admin/v1/user_registration_tokens/mod.rs | 21 + .../v1/user_registration_tokens/revoke.rs | 218 + .../v1/user_registration_tokens/unrevoke.rs | 238 + .../v1/user_registration_tokens/update.rs | 512 + .../src/admin/v1/user_sessions/finish.rs | 216 + .../src/admin/v1/user_sessions/get.rs | 138 + .../src/admin/v1/user_sessions/list.rs | 585 + .../src/admin/v1/user_sessions/mod.rs | 14 + .../crates/handlers/src/admin/v1/users/add.rs | 326 + .../src/admin/v1/users/by_username.rs | 83 + .../handlers/src/admin/v1/users/deactivate.rs | 324 + .../crates/handlers/src/admin/v1/users/get.rs | 75 + .../handlers/src/admin/v1/users/list.rs | 431 + .../handlers/src/admin/v1/users/lock.rs | 182 + .../crates/handlers/src/admin/v1/users/mod.rs | 29 + .../handlers/src/admin/v1/users/reactivate.rs | 222 + .../handlers/src/admin/v1/users/set_admin.rs | 161 + .../src/admin/v1/users/set_password.rs | 298 + .../handlers/src/admin/v1/users/unlock.rs | 213 + .../crates/handlers/src/admin/v1/version.rs | 62 + .../crates/handlers/src/bin/api-schema.rs | 88 + .../crates/handlers/src/bin/graphql-schema.rs | 19 + .../crates/handlers/src/captcha.rs | 257 + .../crates/handlers/src/cleanup_tests.rs | 774 + .../crates/handlers/src/compat/login.rs | 1515 + .../handlers/src/compat/login_sso_complete.rs | 303 + .../handlers/src/compat/login_sso_redirect.rs | 156 + .../crates/handlers/src/compat/logout.rs | 132 + .../crates/handlers/src/compat/logout_all.rs | 201 + .../crates/handlers/src/compat/mod.rs | 172 + .../crates/handlers/src/compat/refresh.rs | 175 + .../crates/handlers/src/compat/tests.rs | 233 + 
.../crates/handlers/src/graphql/mod.rs | 611 + .../src/graphql/model/browser_sessions.rs | 209 + .../src/graphql/model/compat_sessions.rs | 230 + .../handlers/src/graphql/model/cursor.rs | 34 + .../handlers/src/graphql/model/matrix.rs | 41 + .../crates/handlers/src/graphql/model/mod.rs | 137 + .../crates/handlers/src/graphql/model/node.rs | 132 + .../handlers/src/graphql/model/oauth.rs | 201 + .../handlers/src/graphql/model/site_config.rs | 135 + .../src/graphql/model/upstream_oauth.rs | 161 + .../handlers/src/graphql/model/users.rs | 888 + .../src/graphql/model/viewer/anonymous.rs | 18 + .../handlers/src/graphql/model/viewer/mod.rs | 51 + .../src/graphql/mutations/browser_session.rs | 102 + .../src/graphql/mutations/compat_session.rs | 201 + .../handlers/src/graphql/mutations/matrix.rs | 118 + .../handlers/src/graphql/mutations/mod.rs | 90 + .../src/graphql/mutations/oauth2_session.rs | 346 + .../handlers/src/graphql/mutations/user.rs | 986 + .../src/graphql/mutations/user_email.rs | 849 + .../crates/handlers/src/graphql/query/mod.rs | 300 + .../handlers/src/graphql/query/session.rs | 106 + .../src/graphql/query/upstream_oauth.rs | 145 + .../crates/handlers/src/graphql/query/user.rs | 168 + .../handlers/src/graphql/query/viewer.rs | 44 + .../crates/handlers/src/graphql/state.rs | 74 + .../crates/handlers/src/graphql/tests.rs | 1080 + .../crates/handlers/src/health.rs | 41 + .../crates/handlers/src/lib.rs | 512 + .../src/oauth2/authorization/callback.rs | 166 + .../src/oauth2/authorization/consent.rs | 361 + .../handlers/src/oauth2/authorization/mod.rs | 317 + .../handlers/src/oauth2/device/authorize.rs | 227 + .../handlers/src/oauth2/device/consent.rs | 347 + .../crates/handlers/src/oauth2/device/link.rs | 73 + .../crates/handlers/src/oauth2/device/mod.rs | 9 + .../crates/handlers/src/oauth2/discovery.rs | 221 + .../handlers/src/oauth2/introspection.rs | 1302 + .../crates/handlers/src/oauth2/keys.rs | 14 + .../crates/handlers/src/oauth2/mod.rs | 119 + 
.../handlers/src/oauth2/registration.rs | 619 + .../crates/handlers/src/oauth2/revoke.rs | 469 + .../crates/handlers/src/oauth2/token.rs | 1919 + .../crates/handlers/src/oauth2/userinfo.rs | 165 + .../crates/handlers/src/oauth2/webfinger.rs | 47 + .../crates/handlers/src/passwords.rs | 815 + .../crates/handlers/src/preferred_language.rs | 50 + .../crates/handlers/src/rate_limit.rs | 376 + .../crates/handlers/src/session.rs | 167 + ...rds__tests__hash_verify_and_upgrade-2.snap | 5 + ...rds__tests__hash_verify_and_upgrade-3.snap | 5 + ...words__tests__hash_verify_and_upgrade.snap | 5 + ..._passwords__tests__hashing_argon2id-2.snap | 5 + ...s__passwords__tests__hashing_argon2id.snap | 5 + ...s__passwords__tests__hashing_bcrypt-2.snap | 5 + ...ers__passwords__tests__hashing_bcrypt.snap | 5 + ...s__passwords__tests__hashing_pbkdf2-2.snap | 5 + ...ers__passwords__tests__hashing_pbkdf2.snap | 5 + .../crates/handlers/src/test_utils.rs | 887 + .../handlers/src/upstream_oauth2/authorize.rs | 149 + .../src/upstream_oauth2/backchannel_logout.rs | 319 + .../handlers/src/upstream_oauth2/cache.rs | 534 + .../handlers/src/upstream_oauth2/callback.rs | 501 + .../handlers/src/upstream_oauth2/cookie.rs | 225 + .../handlers/src/upstream_oauth2/link.rs | 2348 ++ .../handlers/src/upstream_oauth2/mod.rs | 143 + .../handlers/src/upstream_oauth2/template.rs | 241 + .../crates/handlers/src/views/app.rs | 89 + .../crates/handlers/src/views/index.rs | 63 + .../crates/handlers/src/views/login.rs | 936 + .../crates/handlers/src/views/logout.rs | 61 + .../crates/handlers/src/views/mod.rs | 13 + .../crates/handlers/src/views/recovery/mod.rs | 8 + .../handlers/src/views/recovery/progress.rs | 156 + .../handlers/src/views/recovery/start.rs | 163 + .../handlers/src/views/register/cookie.rs | 102 + .../crates/handlers/src/views/register/mod.rs | 95 + .../handlers/src/views/register/password.rs | 1046 + .../src/views/register/steps/display_name.rs | 183 + .../src/views/register/steps/finish.rs | 367 + 
.../handlers/src/views/register/steps/mod.rs | 9 + .../register/steps/registration_token.rs | 202 + .../src/views/register/steps/verify_email.rs | 210 + .../crates/handlers/src/views/shared.rs | 109 + .../crates/http/Cargo.toml | 35 + .../crates/http/src/ext.rs | 51 + .../crates/http/src/lib.rs | 29 + .../crates/http/src/reqwest.rs | 239 + .../crates/i18n-scan/Cargo.toml | 28 + .../crates/i18n-scan/src/key.rs | 118 + .../crates/i18n-scan/src/main.rs | 123 + .../crates/i18n-scan/src/minijinja.rs | 398 + .../crates/i18n/Cargo.toml | 35 + .../crates/i18n/src/lib.rs | 19 + .../crates/i18n/src/sprintf/argument.rs | 143 + .../crates/i18n/src/sprintf/formatter.rs | 598 + .../crates/i18n/src/sprintf/grammar.pest | 75 + .../crates/i18n/src/sprintf/message.rs | 283 + .../crates/i18n/src/sprintf/mod.rs | 209 + .../crates/i18n/src/sprintf/parser.rs | 337 + .../crates/i18n/src/translations.rs | 338 + .../crates/i18n/src/translator.rs | 480 + .../crates/i18n/test_data/en-US.json | 3 + .../crates/i18n/test_data/en.json | 8 + .../crates/i18n/test_data/fr.json | 8 + .../crates/iana-codegen/Cargo.toml | 30 + .../crates/iana-codegen/src/generation.rs | 243 + .../crates/iana-codegen/src/jose.rs | 284 + .../crates/iana-codegen/src/main.rs | 208 + .../crates/iana-codegen/src/oauth.rs | 146 + .../crates/iana-codegen/src/traits.rs | 116 + .../crates/iana/Cargo.toml | 22 + .../crates/iana/src/jose.rs | 1247 + .../crates/iana/src/lib.rs | 38 + .../crates/iana/src/oauth.rs | 548 + .../crates/jose/Cargo.toml | 47 + .../crates/jose/src/base64.rs | 177 + .../crates/jose/src/claims.rs | 879 + .../crates/jose/src/constraints.rs | 252 + .../crates/jose/src/jwa/asymmetric.rs | 508 + .../crates/jose/src/jwa/hmac.rs | 129 + .../crates/jose/src/jwa/mod.rs | 59 + .../crates/jose/src/jwa/signature.rs | 54 + .../crates/jose/src/jwa/symmetric.rs | 129 + .../crates/jose/src/jwk/mod.rs | 573 + .../crates/jose/src/jwk/private_parameters.rs | 359 + .../crates/jose/src/jwk/public_parameters.rs | 346 + 
.../crates/jose/src/jwt/header.rs | 131 + .../crates/jose/src/jwt/mod.rs | 14 + .../crates/jose/src/jwt/raw.rs | 128 + .../crates/jose/src/jwt/signed.rs | 416 + .../crates/jose/src/lib.rs | 17 + .../crates/jose/tests/generate.py | 143 + .../crates/jose/tests/jws.rs | 225 + .../crates/jose/tests/jwts/eddsa-ed25519.jwt | 1 + .../crates/jose/tests/jwts/eddsa-ed448.jwt | 1 + .../crates/jose/tests/jwts/es256.jwt | 1 + .../crates/jose/tests/jwts/es256k.jwt | 1 + .../crates/jose/tests/jwts/es384.jwt | 1 + .../crates/jose/tests/jwts/es512.jwt | 1 + .../crates/jose/tests/jwts/hs256.jwt | 1 + .../crates/jose/tests/jwts/hs384.jwt | 1 + .../crates/jose/tests/jwts/hs512.jwt | 1 + .../crates/jose/tests/jwts/ps256.jwt | 1 + .../crates/jose/tests/jwts/ps384.jwt | 1 + .../crates/jose/tests/jwts/ps512.jwt | 1 + .../crates/jose/tests/jwts/rs256.jwt | 1 + .../crates/jose/tests/jwts/rs384.jwt | 1 + .../crates/jose/tests/jwts/rs512.jwt | 1 + .../crates/jose/tests/keys/ed25519.priv.pem | 3 + .../crates/jose/tests/keys/ed25519.pub.pem | 3 + .../crates/jose/tests/keys/ed448.priv.pem | 4 + .../crates/jose/tests/keys/ed448.pub.pem | 4 + .../crates/jose/tests/keys/jwks.priv.json | 67 + .../crates/jose/tests/keys/jwks.pub.json | 50 + .../crates/jose/tests/keys/k256.priv.pem | 8 + .../crates/jose/tests/keys/k256.pub.pem | 4 + .../crates/jose/tests/keys/oct.bin | 1 + .../crates/jose/tests/keys/p256.priv.pem | 8 + .../crates/jose/tests/keys/p256.pub.pem | 4 + .../crates/jose/tests/keys/p384.priv.pem | 9 + .../crates/jose/tests/keys/p384.pub.pem | 5 + .../crates/jose/tests/keys/p521.priv.pem | 10 + .../crates/jose/tests/keys/p521.pub.pem | 6 + .../crates/jose/tests/keys/rsa.priv.pem | 27 + .../crates/jose/tests/keys/rsa.pub.pem | 9 + .../tests/snapshots/jws__es256__sign_jwt.snap | 5 + .../snapshots/jws__es256k__sign_jwt.snap | 5 + .../tests/snapshots/jws__es384__sign_jwt.snap | 5 + .../tests/snapshots/jws__ps256__sign_jwt.snap | 5 + .../tests/snapshots/jws__ps384__sign_jwt.snap | 5 + 
.../tests/snapshots/jws__ps512__sign_jwt.snap | 5 + .../tests/snapshots/jws__rs256__sign_jwt.snap | 5 + .../tests/snapshots/jws__rs384__sign_jwt.snap | 5 + .../tests/snapshots/jws__rs512__sign_jwt.snap | 5 + .../crates/keystore/Cargo.toml | 45 + .../crates/keystore/src/encrypter.rs | 94 + .../crates/keystore/src/lib.rs | 642 + .../crates/keystore/tests/generate.sh | 34 + .../keystore/tests/keys/ec-k256.pkcs8.der | Bin 0 -> 135 bytes .../tests/keys/ec-k256.pkcs8.encrypted.der | Bin 0 -> 239 bytes .../tests/keys/ec-k256.pkcs8.encrypted.pem | 7 + .../keystore/tests/keys/ec-k256.pkcs8.pem | 5 + .../keystore/tests/keys/ec-k256.sec1.der | Bin 0 -> 118 bytes .../keystore/tests/keys/ec-k256.sec1.pem | 5 + .../keystore/tests/keys/ec-p256.pkcs8.der | Bin 0 -> 138 bytes .../tests/keys/ec-p256.pkcs8.encrypted.der | Bin 0 -> 239 bytes .../tests/keys/ec-p256.pkcs8.encrypted.pem | 7 + .../keystore/tests/keys/ec-p256.pkcs8.pem | 5 + .../keystore/tests/keys/ec-p256.sec1.der | Bin 0 -> 121 bytes .../keystore/tests/keys/ec-p256.sec1.pem | 5 + .../keystore/tests/keys/ec-p384.pkcs8.der | Bin 0 -> 185 bytes .../tests/keys/ec-p384.pkcs8.encrypted.der | Bin 0 -> 288 bytes .../tests/keys/ec-p384.pkcs8.encrypted.pem | 8 + .../keystore/tests/keys/ec-p384.pkcs8.pem | 6 + .../keystore/tests/keys/ec-p384.sec1.der | Bin 0 -> 167 bytes .../keystore/tests/keys/ec-p384.sec1.pem | 6 + .../tests/keys/ec256.pkcs8.encrypted.pem | 7 + .../crates/keystore/tests/keys/rsa.pkcs1.der | Bin 0 -> 1191 bytes .../crates/keystore/tests/keys/rsa.pkcs1.pem | 27 + .../crates/keystore/tests/keys/rsa.pkcs8.der | Bin 0 -> 1217 bytes .../tests/keys/rsa.pkcs8.encrypted.der | Bin 0 -> 1329 bytes .../tests/keys/rsa.pkcs8.encrypted.pem | 30 + .../crates/keystore/tests/keys/rsa.pkcs8.pem | 28 + .../crates/keystore/tests/keystore.rs | 210 + .../keystore__generate_sign_and_verify-2.snap | 10 + .../keystore__generate_sign_and_verify-3.snap | 11 + .../keystore__generate_sign_and_verify-4.snap | 10 + 
.../keystore__generate_sign_and_verify-5.snap | 21 + .../keystore__generate_sign_and_verify.snap | 32 + .../tests/snapshots/keystore__jwt_ES256.snap | 5 + .../tests/snapshots/keystore__jwt_ES256K.snap | 5 + .../tests/snapshots/keystore__jwt_ES384.snap | 5 + .../tests/snapshots/keystore__jwt_PS256.snap | 5 + .../tests/snapshots/keystore__jwt_PS384.snap | 5 + .../tests/snapshots/keystore__jwt_PS512.snap | 5 + .../tests/snapshots/keystore__jwt_RS256.snap | 5 + .../tests/snapshots/keystore__jwt_RS384.snap | 5 + .../tests/snapshots/keystore__jwt_RS512.snap | 5 + .../crates/listener/Cargo.toml | 45 + .../listener/examples/demo/certs/ca-key.pem | 27 + .../listener/examples/demo/certs/ca.csr | 17 + .../listener/examples/demo/certs/ca.json | 16 + .../listener/examples/demo/certs/ca.pem | 22 + .../examples/demo/certs/client-key.pem | 27 + .../listener/examples/demo/certs/client.csr | 17 + .../listener/examples/demo/certs/client.json | 18 + .../listener/examples/demo/certs/client.pem | 23 + .../listener/examples/demo/certs/config.json | 25 + .../listener/examples/demo/certs/gen.sh | 15 + .../examples/demo/certs/server-key.pem | 27 + .../listener/examples/demo/certs/server.csr | 17 + .../listener/examples/demo/certs/server.json | 18 + .../listener/examples/demo/certs/server.pem | 23 + .../crates/listener/examples/demo/main.rs | 106 + .../crates/listener/src/lib.rs | 49 + .../crates/listener/src/maybe_tls.rs | 223 + .../listener/src/proxy_protocol/acceptor.rs | 60 + .../listener/src/proxy_protocol/maybe.rs | 66 + .../crates/listener/src/proxy_protocol/mod.rs | 15 + .../crates/listener/src/proxy_protocol/v1.rs | 296 + .../crates/listener/src/rewind.rs | 151 + .../crates/listener/src/server.rs | 455 + .../crates/listener/src/unix_or_tcp.rs | 320 + .../crates/matrix-synapse/Cargo.toml | 31 + .../crates/matrix-synapse/src/error.rs | 79 + .../crates/matrix-synapse/src/legacy.rs | 688 + .../crates/matrix-synapse/src/lib.rs | 11 + .../crates/matrix-synapse/src/modern.rs | 567 + 
.../crates/matrix/Cargo.toml | 23 + .../crates/matrix/src/lib.rs | 556 + .../crates/matrix/src/mock.rs | 282 + .../crates/matrix/src/readonly.rs | 100 + .../crates/oauth2-types/Cargo.toml | 37 + .../crates/oauth2-types/src/errors.rs | 619 + .../crates/oauth2-types/src/lib.rs | 33 + .../crates/oauth2-types/src/oidc.rs | 1819 + .../crates/oauth2-types/src/pkce.rs | 168 + .../src/registration/client_metadata_serde.rs | 501 + .../oauth2-types/src/registration/mod.rs | 1423 + .../crates/oauth2-types/src/requests.rs | 1060 + .../crates/oauth2-types/src/response_type.rs | 505 + .../crates/oauth2-types/src/scope.rs | 266 + .../crates/oauth2-types/src/test_utils.rs | 22 + .../crates/oauth2-types/src/webfinger.rs | 92 + .../crates/oidc-client/Cargo.toml | 53 + .../crates/oidc-client/src/error.rs | 312 + .../crates/oidc-client/src/lib.rs | 66 + .../src/requests/authorization_code.rs | 462 + .../src/requests/client_credentials.rs | 67 + .../oidc-client/src/requests/discovery.rs | 94 + .../crates/oidc-client/src/requests/jose.rs | 207 + .../crates/oidc-client/src/requests/mod.rs | 15 + .../oidc-client/src/requests/refresh_token.rs | 119 + .../crates/oidc-client/src/requests/token.rs | 67 + .../oidc-client/src/requests/userinfo.rs | 105 + .../src/types/client_credentials.rs | 369 + .../crates/oidc-client/src/types/mod.rs | 22 + .../crates/oidc-client/tests/it/main.rs | 153 + .../tests/it/requests/authorization_code.rs | 354 + .../tests/it/requests/client_credentials.rs | 99 + .../tests/it/requests/discovery.rs | 91 + .../oidc-client/tests/it/requests/jose.rs | 242 + .../oidc-client/tests/it/requests/mod.rs | 12 + .../tests/it/requests/refresh_token.rs | 90 + .../oidc-client/tests/it/requests/userinfo.rs | 39 + .../tests/it/types/client_credentials.rs | 343 + .../crates/oidc-client/tests/it/types/mod.rs | 7 + .../crates/policy/Cargo.toml | 34 + .../crates/policy/src/bin/schema.rs | 47 + .../crates/policy/src/lib.rs | 725 + .../crates/policy/src/model.rs | 243 + 
.../crates/router/Cargo.toml | 24 + .../crates/router/src/endpoints.rs | 963 + .../crates/router/src/lib.rs | 44 + .../crates/router/src/traits.rs | 54 + .../crates/router/src/url_builder.rs | 305 + .../crates/spa/Cargo.toml | 22 + .../crates/spa/src/lib.rs | 13 + .../crates/spa/src/vite.rs | 206 + ...fd9af472689d5aef7c1c4b1c594ca57c02237.json | 16 + ...c4c3deaa01b5aaa091d3a3487caf3e2634daf.json | 17 + ...339b3fcea95ba59395106318366a6ef432d85.json | 16 + ...299db5b617f52f6c24bcca0a24c0c185c4478.json | 16 + ...86c6a5745e523b76e1083d6bfced0035c2f76.json | 15 + ...67c00c8781f54054a84b3f3005b65cbc2a14a.json | 17 + ...56577d98074e244a35c0d3be24bc18d9d0daa.json | 15 + ...6c35c9c236ea8beb6696e5740fa45655e59f3.json | 15 + ...0ba438b6b18306dfe1454fa4124c0207b3deb.json | 30 + ...79bfc50e22cb12ddf7495c7b0fedca61f9421.json | 17 + ...63aa46225245a04d1c7bc24b5275c44a6d58d.json | 15 + ...ea304d43c336ce80723789ff3e66c0dd4d86c.json | 46 + ...550e4e12d1778474aba72762d9aa093d21ee2.json | 20 + ...0f3f086e4e62c6de9d6864a6a11a2470ebe62.json | 15 + ...3c1de5910c7a46e7fe52d1fb3bfd5561ac320.json | 15 + ...d64d52804848df378dc74f8f54ec4404e094e.json | 15 + ...3353004f458c85f7b4f361802f86651900fbc.json | 15 + ...d45586cdb0c20b98393f89036fbf0f1d2dee2.json | 19 + ...b1ab4aaf128fed6be67ca0f139d697614c63b.json | 15 + ...0a9b7b920d807a4b6c235e1bee524cd73b266.json | 14 + ...a991d2471667cf2981770447cde6fd025fbb7.json | 15 + ...e561e6521c45ce07d3a42411984c9a6b75fdc.json | 14 + ...e05461c3c1f43f966fee3a80ae42540783f08.json | 58 + ...4dc8e678278121efbe1f66bcdc24144d684d0.json | 20 + ...0d0e4e9e1fd1f9ea53b7a359c9025d7304223.json | 15 + ...e3847e376e443ccd841f76b17a81f53fafc3a.json | 15 + ...3361bb85911c48f7db6c3873b0f5abf35940b.json | 14 + ...ff670f1b4ba3c56605c53e2b905d7ec38c8be.json | 14 + ...21657559d9b77fd931f972ce4d9f03a57f97a.json | 30 + ...cf0963f92b6d15e6af0e69378a6447dee677c.json | 14 + ...6a38f3b244c2f46444c0ab345de7feff54aba.json | 20 + ...debaedf37437f3bd4f796f5581fab997587d7.json | 15 + 
...fddbaeedcbbc6963ee573409bfc98e57de6ed.json | 18 + ...e2777f9327708b450d048638a162343478cc6.json | 30 + ...1ffe11da64835ae297c9277271b8971d5de81.json | 36 + ...d43cfd962c729298ad07ee1ade2f2880c0eb3.json | 142 + ...4cbd866cfc2918ca4b1741b5687f21cfe273b.json | 40 + ...ab747a469404533f59ff6fbd56e9eb5ad38e1.json | 14 + ...b2e83858c9944893b8f3a0f0131e8a9b7a494.json | 14 + ...1d78a7f026e8311bdc7d5ccc2f39d962e898f.json | 14 + ...10a8ca899640e78cc8f5b03168622928ffe94.json | 14 + ...9bb249b18ced57d6a4809dffc23972b3e9423.json | 16 + ...668f64b76975dba07e67d04ed7a52e2e8107f.json | 15 + ...83b80c9620e1825ab07ab6c52f3f1a32d2527.json | 16 + ...726c0a4f89f65b4dc3e33ddad58aabf6b148b.json | 52 + ...b924dc20785c91770ed43d62df2e590e8da71.json | 46 + ...c0737c0d20799b497089a566e2ff704d56b67.json | 24 + ...318aa7346917e5c8a37bb0f5b2b3067588009.json | 30 + ...baffb050b2f475ae106155c2e2f210a81191a.json | 15 + ...0544dbf1493cad31a21c0cd7ddb57ed12de16.json | 40 + ...d5f1a8ec44ec8ccccad2d7fce9ac855209883.json | 15 + ...6141752ba90d36467693a68318573171d57b0.json | 34 + ...a0f24afb3d6a1925a80a1b6d35b9a8258a0ce.json | 58 + ...cf69eb67adaa4d40fc1910dfcd2640e32ab37.json | 16 + ...73e81d9b75e90929af80961f8b5910873a43e.json | 14 + ...34d222975d084d0a9ebe7f1b6b865ab2e09ef.json | 23 + ...268818dc84c37b168ab45e582e0a727796a06.json | 30 + ...1bd993088526923769a9147281686c2d47591.json | 46 + ...e72ecb536092b46c92a7dda01598962842323.json | 53 + ...46beca29964733338ea4fec2a29393f031c4f.json | 16 + ...928e8eebd99036255b62d688ac02b5bd74b40.json | 94 + ...727a305cbe4029cd4cebd5ecc274e3e32f533.json | 16 + ...30554dc067d0a6cad963dd7e0c66a80b342bf.json | 16 + ...d25fe794bc0b69c5938275711faa7a80b811f.json | 14 + ...7559da26def223b8954cf32959cce777577d7.json | 24 + ...aa6515f0fc9eb54075e8d6d15520d25b75172.json | 17 + ...2594c97ba0afe972f0fee145b6094789fb6c7.json | 40 + ...0dc74505b22c681322bd99b62c2a540c6cd35.json | 15 + ...5b76ff901c183b314f8ccb5018d70c516abf6.json | 14 + 
...e5cc5119a7d6e7fe784112703c0406f28300f.json | 52 + ...52b5efcc103c3383fa68b552295e2289d1f55.json | 17 + ...5e42662e9854430bcd9e53598debf99c9ca37.json | 24 + ...7093589f7d1b2b8458521b635546b8012041e.json | 16 + ...0e08bab57721007c64ef2597cb09a62100792.json | 166 + ...7a709b06455e08b9fd75cc08f142070f330b3.json | 15 + ...4d55cbfb01492985ac2af5a1ad4af9b3ccc77.json | 15 + ...11e41d86f22b3fa899a952cad00129e59bee6.json | 82 + ...5b531b9873f4139eadcbf1450e726b9a27379.json | 15 + ...16d3b08fd5d736ecf36845e6fd4bfc515b2cf.json | 64 + ...205f636568c990ccb05cf9208750ad1330b9b.json | 24 + ...db550dee05c44c9d93f53df95fe3b4a840347.json | 17 + ...dabe674ea853e0d47eb5c713705cb0130c758.json | 12 + ...39fb7f81015f7ceef61ecaadba64521895cff.json | 19 + ...c1388d6723f82549d88d704d9c939b9d35c49.json | 49 + ...fb2254d1bf9e8a97cd7d32ba789c740e0fbdb.json | 15 + ...a8459736fdd4bbffd932f7930d847f2c3ef5d.json | 16 + ...983113e7d1bc565d01e85c158856abb17ddc6.json | 100 + ...d3313a02886994cfff0690451229fb5ae2f77.json | 46 + ...0c999d601f3a9730e6bbb40cfc43c04195c61.json | 52 + ...59fe609112afbabcbfcc0e7f96c1e531b6567.json | 28 + ...6268efd13d7af3cecb452168d514a379fec30.json | 30 + ...9521b66eaafa51f73bf2f95e38b8f3b64a229.json | 15 + ...1410b4b7b4af441f0a138c5421d1111cb9f79.json | 52 + ...61b07f43ab168956470d120166ed7eab631d9.json | 15 + ...ee92abf78dfbdf1a25e58a2bc9c14be8035f0.json | 16 + ...58659347a1bf71dd62335df046708f19c967e.json | 22 + ...2e45a6eba981a0d621665ed8f8b60354b3389.json | 19 + ...19359913b8a934ca8a642b7bb43c9a7a58a6d.json | 15 + ...ade59f478b0b096b4bc90c89fb9c26b467dd2.json | 15 + ...3745cc1c57bbe3c3c69110592a07400116c7f.json | 58 + ...16c5a4f48e2be87dc26e9d0e3a932c9c49dfb.json | 100 + ...cd711eb78a4d39608e897432d6124cd135938.json | 18 + ...65bea6b799a43cf4c9264a37d392847e6eff0.json | 14 + ...0ee8fca86b5cdce9320e190e3d3b8fd9f63bc.json | 15 + ...0038e9d00492b1e282237c0ec0e03bc36a9c0.json | 19 + ...abecc18c36d8cdba8db3b47953855fa5c9035.json | 130 + 
...47e411b96b2376288a90c242034295e1a147e.json | 24 + ...61540441b14c8206038fdc4a4336bbae3f382.json | 17 + ...a8b231f9e7c211ab83487536008e48316c269.json | 15 + ...6f5c701411387c939f6b8a3478b41b3de4f20.json | 46 + ...fef7d69d6ce0be1f930d9f16c50b921a8b819.json | 17 + ...014c6856229918b972b98946f98b75686ab6c.json | 14 + ...f79832e5d0748dad18ab44c6671f3196d6f60.json | 30 + ...71fb1ed3dafe2bd1a49aa72e4f4862931c6c2.json | 16 + ...ead15df5e0a4209ff47dcf4a5f19d35154e89.json | 64 + ...327d03b29fe413d57cce21c67b6d539f59e7d.json | 15 + ...5903ac99d9bb8ca4d79c908b25a6d1209b9b1.json | 14 + ...344dfab024b42e47ddc7bd9e551897ba6e9b8.json | 164 + ...79cb16f9f8df08fa258cc907007fb9bcd0bc7.json | 82 + ...90082672b3782875cf3f5ba0b2f9d26e3a507.json | 17 + ...982ad01035296bf4539ca5620a043924a7292.json | 15 + ...9afe24f01d9ec34609f373db5c535ccb58516.json | 18 + ...f5b8c297d055abe248cc876dbc12c5a7dc920.json | 18 + ...76eb57ccb6e4053ab8f4450dd4a9d1f6ba108.json | 32 + ...7e260ba8911123744980e24a52bc9b95bd056.json | 18 + ...18705276bce41e9b19d5d7e910ad4b767fb5e.json | 18 + ...f61efbeb1ec24e2c694e1673347bae993762d.json | 24 + ...ddd9d3bc8c99b5b8b1d373939fc3ae9715c27.json | 15 + ...e63041fca7cbab407f98452462ec45e3cfd16.json | 64 + ...a009785f358b334f5c586c2e358f0d0b4d856.json | 64 + ...763eed0582264861436eab3f862e3eb12cab1.json | 18 + ...99b7d0b690a2471873c6654b1b6cf2079b95c.json | 15 + ...1f91acc6076a8521adc3e30a83bf70e2121a0.json | 47 + ...9d378a2f11b2a27bbd86d60558318c87eb698.json | 18 + ...86157d96866f7ef9b09b03a45ba4322664bd0.json | 15 + ...1337d69aebad12be6fbfbdde91e34083ba4ed.json | 16 + ...c05a7ed582c9e5c1f65d27b0686f843ccfe42.json | 16 + ...e34b7e457f857ed37c467c314142877fd5367.json | 15 + ...6691f260a7eca4bf494d6fb11c7cf699adaad.json | 16 + ...2eaf93e1c44108b6770a24c9a24ac29db37d3.json | 100 + ...cb47fb628dabd8fadc50e6a5772903f851e1c.json | 14 + ...3ac2a7cedfd4247b57f062d348b4b1b36bef1.json | 142 + ...314d05ceb3a7b8f35397c0faef3b36d2d14a7.json | 100 + 
...f76a9ba3e2ed3539ef16accb601fb609c2ec9.json | 15 + ...9fab6ea87589819128e6fc9ed5da11dfc2770.json | 58 + ...a75d18e914f823902587b63c9f295407144b1.json | 15 + ...d1f1a1766ff865f2e08a5daa095d2a1ccbd56.json | 15 + ...979f3c78c2caca52cb4b8dc9880e669a1f23e.json | 130 + ...e6a4f61a6a2001b597e4c63ba4588ec5cf530.json | 22 + ...473215f4d3c549ea2c5a4f860a102cc46a667.json | 41 + ...7e9d2f127cb15b1bb650d1ea3805a4c55b196.json | 14 + ...1f1a5e6470cef581b1638f5578546dd28c4df.json | 64 + ...f6e963096edb610183ba13cbbbd3d95c4134b.json | 30 + ...f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0.json | 142 + ...bc38395ab6c6fdf979616fa16fc490897cee3.json | 46 + ...c8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba.json | 64 + ...d83e7cffa26b320ae1442c7f3e22376c4a4ee.json | 16 + ...c554d63c8773040679a77e99ac3fa24cec71d.json | 24 + ...00847f3379f7b57c0972659a35bfb68b0f6cd.json | 19 + ...4a359aea3a46e5d4e07764cec56ce5d6609c0.json | 30 + ...6fcc9a684609956972fe4f95aac13f30b2530.json | 30 + ...f91ac0b326dd751c0d374d6ef4d19f671d22e.json | 24 + ...2d453ab449017ed492bf6711dc7fceb630880.json | 15 + ...0034201575a49e0240ac6715b55ad8d381d0e.json | 14 + ...0096c33df9e30611ef89d01bf0502119cbbe1.json | 15 + ...5767e2150f7da3cf384a7f44741c769f44144.json | 40 + ...c1dddae3f1dfb1ef48494f6aee3fd03abe6f6.json | 24 + ...e1ef2f192ca66f8000d1385626154e5ce4f7e.json | 46 + ...8e601f12c8003fe93a5ecb110d02642d14c3c.json | 18 + ...612e4185cef838f105d66f18cb24865e45140.json | 52 + ...273e7ed0770c81071cf1f17516d3a45881ae9.json | 100 + ...c1bd60bb771c6f075df15ab0137a7ffc896da.json | 22 + ...3cfa33a0bf3328df6e869464fe7f31429b81e.json | 16 + ...9bac77b09f67e623d5371ab4dadbe2d41fa1c.json | 20 + ...320760971317c4519fae7af9d44e2be50985d.json | 15 + ...4526a54ebbed8eef8f885f3a6723bc8490908.json | 14 + ...426603c84df4ef1c6ce5dc441a63d2dc46f6e.json | 17 + ...bdeab3c28a56a395f52f001a7bb013a5dfece.json | 58 + ...ca42c790c101a3fc9442862b5885d5116325a.json | 16 + ...bf34f8aa9ea8ed50929731845e32dc3176e39.json | 19 + 
...bc9991135065e81af8f77b5beef9405607577.json | 14 + ...c0f594332be5829d5d7c6b66183ac25b3d166.json | 20 + ...13cbb364b3505626d34550e38f6f7397b9d42.json | 140 + ...066dcefebe978a7af477bb6b55faa1d31e9b1.json | 64 + ...9c715560d011d4c01112703a9c046170c84f1.json | 32 + ...729681d450669563dd1178c492ffce51e5ff2.json | 76 + ...b06a842cabc787279ba7d690f69b59ad3eb50.json | 20 + ...9f3005b55c654897a8e46dc933c7fd2263c7c.json | 12 + ...2980b5fe575ae8432a000e9c4e4307caa2d9b.json | 16 + .../crates/storage-pg/Cargo.toml | 44 + .../crates/storage-pg/build.rs | 10 + .../20220530084123_jobs_workers.sql | 83 + .../migrations/20221018142001_init.sql | 351 + .../20221121151402_upstream_oauth.sql | 91 + .../20221213145242_password_schemes.sql | 24 + .../20230408234928_add_get_jobs_fn_.sql | 27 + .../20230616093555_compat_admin_flag.sql | 16 + ...21140528_upstream_oauth_claims_imports.sql | 19 + .../20230626130338_oauth_clients_static.sql | 19 + .../migrations/20230728154304_user_lock.sql | 19 + .../20230823125247_drop_apalis_push_job.sql | 53 + ...30828085439_oauth2_clients_more_fields.sql | 32 + ...553_user_session_authentication_source.sql | 23 + ...920_oauth2_sessions_user_id_scope_list.sql | 49 + ...20230829141928_user_session_user_agent.sql | 16 + ...135550_oauth2_client_credentials_grant.sql | 21 + ...20230911091636_oauth2_token_expiration.sql | 19 + ...919155444_record_session_last_activity.sql | 39 + .../20231009142904_user_can_request_admin.sql | 17 + ...0231116104353_upstream_oauth_overrides.sql | 21 + ...20231120110559_upstream_oauth_branding.sql | 18 + ...20231207090532_oauth_device_code_grant.sql | 82 + ...155602_oauth_clients_device_code_grant.sql | 18 + .../migrations/20240207100003_user_terms.sql | 32 + ...40220141353_nonunique_compat_device_id.sql | 17 + ...201_compat_sessions_user_sessions_link.sql | 19 + .../20240221164945_sessions_user_agent.sql | 17 + ...1_upstream_oauth_additional_parameters.sql | 18 + ...40402084854_upstream_oauth_disabled_at.sql | 18 + 
.../20240621080509_user_recovery.sql | 64 + .../20240718075125_sessions_active_index.sql | 19 + .../20241004075132_queue_worker.sql | 37 + .../migrations/20241004121132_queue_job.sql | 79 + .../20241007160050_oidc_login_hint.sql | 3 + ...15163340_upstream_oauth2_response_mode.sql | 8 + ...314_upstream_oauth2_extra_query_params.sql | 9 + .../20241120163320_queue_job_failures.sql | 17 + .../20241122130349_queue_job_scheduled.sql | 11 + ...241122133435_queue_job_scheduled_index.sql | 9 + ...20241124145741_upstream_oauth_userinfo.sql | 13 + .../20241125110803_queue_job_recurrent.sql | 25 + ...1057_upstream_oauth2_link_account_name.sql | 9 + ...202123523_upstream_oauth_responses_alg.sql | 10 + ...0115428_oauth_refresh_token_track_next.sql | 9 + ...0133651_oauth2_access_token_first_used.sql | 8 + ...241212154426_oauth2_response_mode_null.sql | 7 + ...3180524_upstream_oauth_optional_issuer.sql | 8 + ...105709_user_email_authentication_codes.sql | 29 + .../20250113102144_user_registrations.sql | 49 + ...35939_allow_deviceless_compat_sessions.sql | 7 + ...250115155255_cleanup_unverified_emails.sql | 14 + ...0124151529_unsupported_threepids_table.sql | 30 + ...0129154003_compat_sessions_device_name.sql | 9 + .../20250130170011_user_is_guest.sql | 10 + .../20250225091000_dynamic_policy_data.sql | 15 + .../20250311093145_user_deactivated_at.sql | 8 + ...094013_upstream_oauth2_providers_order.sql | 9 + ...803_upstream_oauth_session_unlinked_at.sql | 7 + .../20250325102310_oauth2_clients_hash.sql | 13 + ...05103_compat_sso_login_browser_session.sql | 23 + ...00_idx_compat_access_tokens_session_fk.sql | 9 + ...1_idx_compat_refresh_tokens_session_fk.sql | 9 + ..._compat_refresh_tokens_access_token_fk.sql | 9 + ...0410000003_idx_compat_sessions_user_fk.sql | 13 + ...04_idx_compat_sessions_user_session_fk.sql | 9 + ...compat_sessions_user_id_last_active_at.sql | 8 + ...00006_idx_compat_sso_logins_session_fk.sql | 9 + ...07_idx_oauth2_access_tokens_session_fk.sql | 9 + 
...oauth2_authorization_grants_session_fk.sql | 9 + ..._oauth2_authorization_grants_client_fk.sql | 9 + ...10000010_idx_oauth2_consents_client_fk.sql | 9 + ...0410000011_idx_oauth2_consents_user_fk.sql | 9 + ...dx_oauth2_device_code_grants_client_fk.sql | 9 + ...x_oauth2_device_code_grants_session_fk.sql | 9 + ...th2_device_code_grants_user_session_fk.sql | 9 + ...5_idx_oauth2_refresh_tokens_session_fk.sql | 9 + ..._oauth2_refresh_tokens_access_token_fk.sql | 9 + ...2_refresh_tokens_next_refresh_token_fk.sql | 9 + ...18_idx_oauth2_sessions_user_session_fk.sql | 9 + ...10000019_idx_oauth2_sessions_client_fk.sql | 9 + ...0410000020_idx_oauth2_sessions_user_fk.sql | 13 + ...oauth2_sessions_user_id_last_active_at.sql | 8 + ...410000022_idx_queue_jobs_started_by_fk.sql | 9 + ...0000023_idx_queue_jobs_next_attempt_fk.sql | 9 + ...000024_idx_queue_jobs_schedule_name_fk.sql | 9 + ...uth_authorization_sessions_provider_fk.sql | 9 + ...m_oauth_authorization_sessions_link_fk.sql | 9 + ...7_idx_upstream_oauth_links_provider_fk.sql | 9 + ...00028_idx_upstream_oauth_links_user_fk.sql | 9 + ...authentication_codes_authentication_fk.sql | 9 + ..._email_authentications_user_session_fk.sql | 9 + ...l_authentications_user_registration_fk.sql | 9 + ...20250410000032_idx_user_emails_user_fk.sql | 9 + ...250410000033_idx_user_emails_email_idx.sql | 10 + ...50410000034_idx_user_passwords_user_fk.sql | 9 + ...5_idx_user_recovery_tickets_session_fk.sql | 9 + ...dx_user_recovery_tickets_user_email_fk.sql | 9 + ..._registrations_email_authentication_fk.sql | 9 + ...ession_authentications_user_session_fk.sql | 9 + ...ssion_authentications_user_password_fk.sql | 9 + ...hentications_upstream_oauth_session_fk.sql | 9 + ...250410000041_idx_user_sessions_user_fk.sql | 13 + ...p_user_sessions_user_id_last_active_at.sql | 8 + .../20250410000043_idx_user_terms_user_fk.sql | 9 + ...50410000044_idx_users_primary_email_fk.sql | 11 + ...5_idx_user_recovery_tickets_ticket_idx.sql | 10 + 
...0250410121612_users_lower_username_idx.sql | 10 + ...authorization_default_requires_consent.sql | 9 + .../20250424150930_oauth2_grants_locale.sql | 8 + ...250425113717_oauth2_session_human_name.sql | 8 + ...158_upstream_oauth2_forward_login_hint.sql | 8 + ..._upstream_oauth_session_optional_nonce.sql | 8 + ...0250602212100_user_registration_tokens.sql | 57 + ...0602212101_idx_user_registration_token.sql | 9 + ...212102_upstream_oauth2_id_token_claims.sql | 8 + ...m_oauth2_id_token_claims_sub_sid_index.sql | 15 + ...m_oauth2_id_token_claims_sid_sub_index.sql | 15 + ...3_upstream_oauth_on_backchannel_logout.sql | 10 + ...0708155857_idx_user_emails_lower_email.sql | 11 + ...20250709142230_id_token_claims_trigger.sql | 51 + ...0250709142240_backfill_id_token_claims.sql | 22 + .../20250915092000_pgtrgm_extension.sql | 10 + ...20250915092635_users_username_trgm_idx.sql | 10 + .../20250924132713_personal_access_tokens.sql | 68 + ...4634_personal_access_tokens_unique_fix.sql | 14 + ...er_registration_upstream_oauth_session.sql | 10 + ...egistration_upstream_oauth_session_idx.sql | 9 + .../20260108111542_remove_apalis.sql | 14 + ...30_remove_user_emails_old_confirmation.sql | 18 + ...20260108121127_cleanup_oauth2_consents.sql | 18 + ...121952_cleanup_id_token_claims_trigger.sql | 11 + ...move_deactivated_unsupported_threepids.sql | 13 + .../20260108145240_drop_oauth2_consents.sql | 9 + ...627_oauth_access_tokens_revoked_at_idx.sql | 9 + ...009_oauth_access_tokens_expires_at_idx.sql | 9 + ...9172537_oauth_refresh_token_revoked_at.sql | 9 + ...auth_refresh_token_next_token_set_null.sql | 15 + ...esh_token_next_token_set_null_validate.sql | 9 + ...0_oauth_refresh_token_not_consumed_idx.sql | 11 + ...37_oauth_refresh_token_consumed_at_idx.sql | 11 + ...111313_idx_compat_sessions_finished_at.sql | 11 + ...0002_idx_upstream_oauth_links_orphaned.sql | 10 + ...00003_queue_jobs_next_attempt_set_null.sql | 14 + ...ue_jobs_next_attempt_set_null_validate.sql | 10 + 
...3025_upstream_oauth_track_user_session.sql | 11 + ...4214_upstream_auth_user_session_fk_idx.sql | 11 + ...1_upstream_oauth_sessions_orphan_index.sql | 10 + ...tream_oauth_track_user_session_trigger.sql | 27 + ...ream_oauth_track_user_session_backfill.sql | 13 + ...compat_sessions_user_session_no_action.sql | 19 + ..._compat_sessions_user_session_validate.sql | 9 + ...123211_idx_oauth2_sessions_finished_at.sql | 11 + ...22124231_idx_user_sessions_finished_at.sql | 11 + ...90000_idx_oauth2_sessions_inactive_ips.sql | 10 + ...90001_idx_compat_sessions_inactive_ips.sql | 10 + ...3090002_idx_user_sessions_inactive_ips.sql | 10 + .../crates/storage-pg/src/app_session.rs | 786 + .../storage-pg/src/compat/access_token.rs | 210 + .../crates/storage-pg/src/compat/mod.rs | 753 + .../storage-pg/src/compat/refresh_token.rs | 241 + .../crates/storage-pg/src/compat/session.rs | 814 + .../crates/storage-pg/src/compat/sso_login.rs | 483 + .../crates/storage-pg/src/errors.rs | 136 + .../crates/storage-pg/src/filter.rs | 57 + .../crates/storage-pg/src/iden.rs | 210 + .../crates/storage-pg/src/lib.rs | 483 + .../storage-pg/src/oauth2/access_token.rs | 366 + .../src/oauth2/authorization_grant.rs | 502 + .../crates/storage-pg/src/oauth2/client.rs | 851 + .../src/oauth2/device_code_grant.rs | 523 + .../crates/storage-pg/src/oauth2/mod.rs | 947 + .../storage-pg/src/oauth2/refresh_token.rs | 400 + .../crates/storage-pg/src/oauth2/session.rs | 708 + .../crates/storage-pg/src/pagination.rs | 65 + .../storage-pg/src/personal/access_token.rs | 253 + .../crates/storage-pg/src/personal/mod.rs | 422 + .../crates/storage-pg/src/personal/session.rs | 702 + .../crates/storage-pg/src/policy_data.rs | 205 + .../crates/storage-pg/src/queue/job.rs | 498 + .../crates/storage-pg/src/queue/mod.rs | 10 + .../crates/storage-pg/src/queue/schedule.rs | 86 + .../crates/storage-pg/src/queue/worker.rs | 259 + .../crates/storage-pg/src/repository.rs | 363 + .../crates/storage-pg/src/telemetry.rs | 31 + 
.../crates/storage-pg/src/tracing.rs | 33 + .../storage-pg/src/upstream_oauth2/link.rs | 502 + .../storage-pg/src/upstream_oauth2/mod.rs | 668 + .../src/upstream_oauth2/provider.rs | 984 + .../storage-pg/src/upstream_oauth2/session.rs | 621 + .../crates/storage-pg/src/user/email.rs | 817 + .../crates/storage-pg/src/user/mod.rs | 612 + .../crates/storage-pg/src/user/password.rs | 149 + .../crates/storage-pg/src/user/recovery.rs | 379 + .../storage-pg/src/user/registration.rs | 1067 + .../storage-pg/src/user/registration_token.rs | 960 + .../crates/storage-pg/src/user/session.rs | 751 + .../crates/storage-pg/src/user/terms.rs | 74 + .../crates/storage-pg/src/user/tests.rs | 890 + .../crates/storage/Cargo.toml | 36 + .../crates/storage/src/app_session.rs | 224 + .../crates/storage/src/compat/access_token.rs | 113 + .../crates/storage/src/compat/mod.rs | 19 + .../storage/src/compat/refresh_token.rs | 130 + .../crates/storage/src/compat/session.rs | 479 + .../crates/storage/src/compat/sso_login.rs | 285 + .../crates/storage/src/lib.rs | 127 + .../crates/storage/src/oauth2/access_token.rs | 198 + .../storage/src/oauth2/authorization_grant.rs | 198 + .../crates/storage/src/oauth2/client.rs | 254 + .../storage/src/oauth2/device_code_grant.rs | 260 + .../crates/storage/src/oauth2/mod.rs | 23 + .../storage/src/oauth2/refresh_token.rs | 206 + .../crates/storage/src/oauth2/session.rs | 591 + .../crates/storage/src/pagination.rs | 237 + .../storage/src/personal/access_token.rs | 140 + .../crates/storage/src/personal/mod.rs | 16 + .../crates/storage/src/personal/session.rs | 398 + .../crates/storage/src/policy_data.rs | 76 + .../crates/storage/src/queue/job.rs | 409 + .../crates/storage/src/queue/mod.rs | 18 + .../crates/storage/src/queue/schedule.rs | 57 + .../crates/storage/src/queue/tasks.rs | 653 + .../crates/storage/src/queue/worker.rs | 128 + .../crates/storage/src/repository.rs | 674 + .../storage/src/upstream_oauth2/link.rs | 286 + 
.../crates/storage/src/upstream_oauth2/mod.rs | 20 + .../storage/src/upstream_oauth2/provider.rs | 328 + .../storage/src/upstream_oauth2/session.rs | 288 + .../crates/storage/src/user/email.rs | 446 + .../crates/storage/src/user/mod.rs | 402 + .../crates/storage/src/user/password.rs | 71 + .../crates/storage/src/user/recovery.rs | 192 + .../crates/storage/src/user/registration.rs | 279 + .../storage/src/user/registration_token.rs | 319 + .../crates/storage/src/user/session.rs | 428 + .../crates/storage/src/user/terms.rs | 50 + .../crates/storage/src/utils.rs | 71 + ...110039b9a4a0425fd566e401f56ea215de0dd.json | 18 + ...4c8d05c577cf8f049d8822746c7d1dbd23752.json | 16 + ...233e5c9aabfdae1f0ee9b77c909b2bb2f3e25.json | 22 + ...5d4a86bf5758f8c32d9d41a22999b2f0698ca.json | 34 + ...b986b1b4864a778525d0b8b0ad6678aef3e9f.json | 18 + ...a9cd62ac3c9e58155882858c6056e2ef6c30d.json | 17 + ...85277958b66e4534561686c073e282fafaf2a.json | 20 + ...e3db8ff7a686180d71052911879f186ed1c8e.json | 18 + ...d9f96a754eba64912566e81a90bd4cbd186f0.json | 34 + ...664b16ebd813dfa0aa32a6d39dd5c393af299.json | 34 + ...747fcb5e79d7e8c1212b2a679c3bde908ce93.json | 16 + ...2b8e3d74161c8b6c5fe1a746b6958ccd2fd84.json | 32 + ...3c67bf00fd3e411f769b9f25dec27428489ed.json | 18 + ...7cee05ac1a628e51fe61ba6dfed253e0c63c2.json | 32 + ...12878ee329ca72070d849eb61ac9c8f9d1c76.json | 22 + ...099255155193dafbbd185cd8f26d93ff423a7.json | 17 + .../crates/syn2mas/Cargo.toml | 55 + .../crates/syn2mas/src/lib.rs | 27 + .../crates/syn2mas/src/mas_writer/checks.rs | 81 + .../src/mas_writer/constraint_pausing.rs | 170 + .../mas_writer/fixtures/upstream_provider.sql | 21 + .../crates/syn2mas/src/mas_writer/locking.rs | 60 + .../crates/syn2mas/src/mas_writer/mod.rs | 1716 + ...syn2mas__mas_writer__test__write_user.snap | 12 + ...r__test__write_user_with_access_token.snap | 30 + ..._writer__test__write_user_with_device.snap | 24 + ...s_writer__test__write_user_with_email.snap | 17 + 
...riter__test__write_user_with_password.snap | 19 + ...__test__write_user_with_refresh_token.snap | 37 + ..._write_user_with_unsupported_threepid.snap | 17 + ...rite_user_with_upstream_provider_link.snap | 46 + .../syn2mas_revert_temporary_tables.sql | 18 + .../mas_writer/syn2mas_temporary_tables.sql | 47 + .../crates/syn2mas/src/migration.rs | 1050 + .../crates/syn2mas/src/progress.rs | 208 + .../syn2mas/src/synapse_reader/checks.rs | 351 + .../syn2mas/src/synapse_reader/config/mod.rs | 425 + .../syn2mas/src/synapse_reader/config/oidc.rs | 352 + .../fixtures/access_token_alice.sql | 19 + .../access_token_alice_with_puppet.sql | 21 + .../access_token_alice_with_refresh_token.sql | 61 + ..._token_alice_with_unused_refresh_token.sql | 61 + .../synapse_reader/fixtures/devices_alice.sql | 43 + .../fixtures/external_ids_alice.sql | 17 + .../fixtures/threepids_alice.sql | 28 + .../synapse_reader/fixtures/user_alice.sql | 43 + .../crates/syn2mas/src/synapse_reader/mod.rs | 730 + ..._test__read_access_and_refresh_tokens.snap | 16 + ...read_access_and_unused_refresh_tokens.snap | 26 + ...napse_reader__test__read_access_token.snap | 17 + ...s__synapse_reader__test__read_devices.snap | 26 + ...napse_reader__test__read_external_ids.snap | 13 + ..._synapse_reader__test__read_threepids.snap | 26 + ...mas__synapse_reader__test__read_users.snap | 28 + .../crates/syn2mas/src/telemetry.rs | 19 + .../20250117064958_users.sql | 23 + .../20250128141011_threepids.sql | 14 + .../20250128162513_external_ids.sql | 12 + ...250128201100_access_and_refresh_tokens.sql | 28 + .../20250129140230_devices.sql | 15 + .../crates/tasks/Cargo.toml | 46 + .../crates/tasks/src/cleanup/misc.rs | 88 + .../crates/tasks/src/cleanup/mod.rs | 24 + .../crates/tasks/src/cleanup/oauth.rs | 216 + .../crates/tasks/src/cleanup/sessions.rs | 290 + .../crates/tasks/src/cleanup/tokens.rs | 214 + .../crates/tasks/src/cleanup/user.rs | 181 + .../crates/tasks/src/email.rs | 135 + .../crates/tasks/src/lib.rs | 322 + 
.../crates/tasks/src/matrix.rs | 288 + .../crates/tasks/src/new_queue.rs | 1211 + .../crates/tasks/src/recovery.rs | 119 + .../crates/tasks/src/sessions.rs | 242 + .../crates/tasks/src/user.rs | 184 + .../crates/templates/Cargo.toml | 49 + .../crates/templates/src/context.rs | 2132 ++ .../crates/templates/src/context/branding.rs | 71 + .../crates/templates/src/context/captcha.rs | 75 + .../crates/templates/src/context/ext.rs | 54 + .../crates/templates/src/context/features.rs | 57 + .../crates/templates/src/forms.rs | 291 + .../crates/templates/src/functions.rs | 658 + .../crates/templates/src/lib.rs | 546 + .../crates/templates/src/macros.rs | 128 + .../crates/tower/Cargo.toml | 28 + .../crates/tower/src/lib.rs | 27 + .../crates/tower/src/metrics/duration.rs | 227 + .../crates/tower/src/metrics/in_flight.rs | 155 + .../tower/src/metrics/make_attributes.rs | 157 + .../crates/tower/src/metrics/mod.rs | 15 + .../crates/tower/src/trace_context.rs | 101 + .../crates/tower/src/tracing/enrich_span.rs | 106 + .../crates/tower/src/tracing/future.rs | 63 + .../crates/tower/src/tracing/layer.rs | 97 + .../crates/tower/src/tracing/make_span.rs | 64 + .../crates/tower/src/tracing/mod.rs | 19 + .../crates/tower/src/tracing/service.rs | 59 + .../crates/tower/src/utils.rs | 90 + matrix-authentication-service/deny.toml | 92 + matrix-authentication-service/docker-bake.hcl | 43 + matrix-authentication-service/docs/README.md | 20 + matrix-authentication-service/docs/SUMMARY.md | 52 + .../docs/api/index.html | 23 + .../docs/api/oauth2-redirect.html | 80 + .../docs/api/spec.json | 7449 ++++ .../docs/as-login.md | 7 + .../docs/config.schema.json | 2917 ++ .../docs/development/architecture.md | 113 + .../docs/development/cleanup-jobs.md | 280 + .../docs/development/contributing.md | 124 + .../docs/development/database.md | 78 + .../docs/development/graphql.md | 26 + .../docs/development/releasing.md | 123 + .../docs/reference/cli/README.md | 45 + .../docs/reference/cli/config.md | 59 + 
.../docs/reference/cli/database.md | 15 + .../docs/reference/cli/doctor.md | 16 + .../docs/reference/cli/manage.md | 147 + .../docs/reference/cli/server.md | 21 + .../docs/reference/cli/syn2mas.md | 29 + .../docs/reference/cli/templates.md | 33 + .../docs/reference/cli/worker.md | 13 + .../docs/reference/configuration.md | 886 + .../docs/reference/scopes.md | 101 + .../docs/rustdoc/mas_handlers/README.md | 2 + .../docs/setup/README.md | 21 + .../docs/setup/database.md | 75 + .../docs/setup/general.md | 76 + .../docs/setup/homeserver.md | 69 + .../docs/setup/installation.md | 112 + .../docs/setup/migration.md | 206 + .../docs/setup/reverse-proxy.md | 202 + .../docs/setup/running.md | 63 + .../docs/setup/sso.md | 670 + .../docs/storybook/README.md | 2 + .../docs/topics/access-token.md | 31 + .../docs/topics/admin-api.md | 272 + .../docs/topics/authorization.md | 170 + .../docs/topics/policy.md | 74 + .../frontend/.browserlistrc | 7 + .../frontend/.gitignore | 8 + matrix-authentication-service/frontend/.npmrc | 1 + .../frontend/.postcssrc.json | 8 + .../frontend/.storybook/locales.ts | 228 + .../frontend/.storybook/main.ts | 35 + .../frontend/.storybook/preview-head.html | 9 + .../frontend/.storybook/preview.tsx | 152 + .../.storybook/public/mockServiceWorker.js | 349 + .../frontend/codegen.ts | 33 + .../frontend/graphql.config.json | 4 + .../frontend/i18next.config.ts | 18 + .../frontend/index.html | 28 + .../frontend/knip.config.ts | 20 + .../frontend/locales/cs.json | 404 + .../frontend/locales/da.json | 401 + .../frontend/locales/de.json | 401 + .../frontend/locales/en.json | 329 + .../frontend/locales/et.json | 401 + .../frontend/locales/fi.json | 401 + .../frontend/locales/fr.json | 401 + .../frontend/locales/hu.json | 401 + .../frontend/locales/nb-NO.json | 401 + .../frontend/locales/nl.json | 401 + .../frontend/locales/pl.json | 404 + .../frontend/locales/pt.json | 401 + .../frontend/locales/ru.json | 404 + .../frontend/locales/sv.json | 401 + 
.../frontend/locales/uk.json | 404 + .../frontend/locales/zh-Hans.json | 398 + .../frontend/package-lock.json | 13509 +++++++ .../frontend/package.json | 88 + .../frontend/schema.graphql | 2491 ++ .../frontend/src/@types/i18next.d.ts | 19 + .../src/components/AccountDeleteButton.tsx | 276 + ...ccountManagementPasswordPreview.module.css | 24 + .../AccountManagementPasswordPreview.tsx | 53 + .../AccountManagementPasswordPreview/index.ts | 7 + .../src/components/BrowserSession.tsx | 135 + .../src/components/ButtonLink.module.css | 13 + .../frontend/src/components/ButtonLink.tsx | 39 + .../Client/OAuth2ClientDetail.test.tsx | 64 + .../components/Client/OAuth2ClientDetail.tsx | 96 + .../OAuth2ClientDetail.test.tsx.snap | 97 + .../Collapsible/Collapsible.module.css | 52 + .../Collapsible/Collapsible.stories.tsx | 29 + .../components/Collapsible/Collapsible.tsx | 72 + .../src/components/Collapsible/index.ts | 7 + .../src/components/CompatSession.test.tsx | 48 + .../frontend/src/components/CompatSession.tsx | 111 + .../src/components/DateTime.stories.tsx | 59 + .../frontend/src/components/DateTime.tsx | 58 + .../src/components/Dialog/Dialog.module.css | 134 + .../src/components/Dialog/Dialog.stories.tsx | 55 + .../frontend/src/components/Dialog/Dialog.tsx | 105 + .../frontend/src/components/Dialog/index.ts | 7 + .../EmptyState/EmptyState.module.css | 17 + .../EmptyState/EmptyState.stories.tsx | 27 + .../src/components/EmptyState/EmptyState.tsx | 25 + .../src/components/EmptyState/index.ts | 7 + .../frontend/src/components/ErrorBoundary.tsx | 57 + .../ExternalLink/ExternalLink.module.css | 11 + .../components/ExternalLink/ExternalLink.tsx | 26 + .../src/components/Filter/Filter.module.css | 51 + .../src/components/Filter/Filter.stories.tsx | 41 + .../frontend/src/components/Filter/Filter.tsx | 39 + .../frontend/src/components/Filter/index.ts | 7 + .../src/components/Footer/Footer.module.css | 33 + .../src/components/Footer/Footer.stories.tsx | 72 + 
.../frontend/src/components/Footer/Footer.tsx | 73 + .../frontend/src/components/Footer/index.ts | 7 + .../src/components/GenericError.module.css | 14 + .../frontend/src/components/GenericError.tsx | 53 + .../src/components/Layout/Layout.module.css | 40 + .../frontend/src/components/Layout/Layout.tsx | 59 + .../frontend/src/components/Layout/index.ts | 7 + .../frontend/src/components/Link.tsx | 10 + .../LoadingScreen/LoadingScreen.module.css | 18 + .../LoadingScreen/LoadingScreen.stories.tsx | 23 + .../LoadingScreen/LoadingScreen.test.tsx | 19 + .../LoadingScreen/LoadingScreen.tsx | 17 + .../__snapshots__/LoadingScreen.test.tsx.snap | 31 + .../src/components/LoadingScreen/index.ts | 7 + .../LoadingSpinner/LoadingSpinner.module.css | 43 + .../LoadingSpinner/LoadingSpinner.stories.tsx | 20 + .../LoadingSpinner/LoadingSpinner.tsx | 42 + .../src/components/LoadingSpinner/index.ts | 7 + .../src/components/NavBar/NavBar.module.css | 18 + .../src/components/NavBar/NavBar.stories.tsx | 28 + .../frontend/src/components/NavBar/NavBar.tsx | 15 + .../frontend/src/components/NavBar/index.ts | 7 + .../src/components/NavItem/NavItem.module.css | 61 + .../src/components/NavItem/NavItem.tsx | 23 + .../__snapshots__/NavItem.test.tsx.snap | 45 + .../frontend/src/components/NavItem/index.ts | 7 + .../frontend/src/components/NotFound.tsx | 19 + .../src/components/OAuth2Session.test.tsx | 70 + .../frontend/src/components/OAuth2Session.tsx | 133 + .../PageHeading/PageHeading.module.css | 82 + .../components/PageHeading/PageHeading.tsx | 44 + .../src/components/PageHeading/index.ts | 7 + .../src/components/PaginationControls.tsx | 58 + .../src/components/PasswordConfirmation.tsx | 107 + .../PasswordCreationDoubleInput.tsx | 161 + .../components/Separator/Separator.module.css | 13 + .../src/components/Separator/Separator.tsx | 35 + .../src/components/Separator/index.tsx | 6 + .../Session/ClientAvatar.module.css | 15 + .../components/Session/ClientAvatar.test.tsx | 30 + 
.../src/components/Session/ClientAvatar.tsx | 40 + .../Session/DeviceTypeIcon.module.css | 16 + .../Session/DeviceTypeIcon.stories.tsx | 45 + .../Session/DeviceTypeIcon.test.tsx | 36 + .../src/components/Session/DeviceTypeIcon.tsx | 47 + .../Session/EndBrowserSessionButton.tsx | 128 + .../Session/EndCompatSessionButton.tsx | 94 + .../Session/EndOAuth2SessionButton.tsx | 115 + .../components/Session/EndSessionButton.tsx | 68 + .../components/Session/LastActive.module.css | 10 + .../components/Session/LastActive.stories.tsx | 56 + .../components/Session/LastActive.test.tsx | 48 + .../src/components/Session/LastActive.tsx | 61 + .../__snapshots__/ClientAvatar.test.tsx.snap | 13 + .../DeviceTypeIcon.test.tsx.snap | 76 + .../__snapshots__/LastActive.test.tsx.snap | 42 + .../__snapshots__/Session.test.tsx.snap | 346 + .../SessionCard/SessionCard.module.css | 173 + .../SessionCard/SessionCard.stories.tsx | 70 + .../components/SessionCard/SessionCard.tsx | 99 + .../src/components/SessionCard/index.ts | 17 + .../SessionDetail/BrowserSessionDetail.tsx | 126 + .../CompatSessionDetail.test.tsx | 86 + .../SessionDetail/CompatSessionDetail.tsx | 186 + .../SessionDetail/EditSessionName.tsx | 103 + .../OAuth2SessionDetail.test.tsx | 70 + .../SessionDetail/OAuth2SessionDetail.tsx | 205 + .../SessionDetail/SessionHeader.module.css | 58 + .../SessionDetail/SessionHeader.stories.tsx | 31 + .../SessionDetail/SessionHeader.test.tsx | 27 + .../SessionDetail/SessionHeader.tsx | 27 + .../components/SessionDetail/SessionInfo.tsx | 188 + .../CompatSessionDetail.test.tsx.snap | 870 + .../OAuth2SessionDetail.test.tsx.snap | 631 + .../__snapshots__/SessionHeader.test.tsx.snap | 33 + .../src/components/Typography.stories.tsx | 102 + .../frontend/src/components/Typography.tsx | 51 + .../components/UserEmail/UserEmail.module.css | 72 + .../src/components/UserEmail/UserEmail.tsx | 202 + .../src/components/UserEmail/index.ts | 7 + .../UserGreeting/UserGreeting.module.css | 48 + 
.../UserGreeting/UserGreeting.stories.tsx | 81 + .../components/UserGreeting/UserGreeting.tsx | 219 + .../src/components/UserGreeting/index.ts | 7 + .../components/UserProfile/AddEmailForm.tsx | 184 + .../components/UserProfile/UserEmailList.tsx | 135 + .../BrowserSessionsOverview.module.css | 17 + .../BrowserSessionsOverview.stories.tsx | 54 + .../BrowserSessionsOverview.test.tsx | 54 + .../BrowserSessionsOverview.tsx | 48 + .../BrowserSessionsOverview.test.tsx.snap | 65 + .../UserSessionsOverview.test.tsx.snap | 3 + .../__snapshots__/CompatSession.test.tsx.snap | 193 + .../__snapshots__/LoadingScreen.test.tsx.snap | 28 + .../__snapshots__/OAuth2Session.test.tsx.snap | 266 + .../frontend/src/config.ts | 22 + .../frontend/src/entrypoints/main.tsx | 36 + .../frontend/src/entrypoints/shared.css | 19 + .../frontend/src/entrypoints/swagger.ts | 31 + .../frontend/src/entrypoints/templates.css | 263 + .../frontend/src/entrypoints/templates.ts | 98 + .../frontend/src/gql/fragment-masking.ts | 83 + .../frontend/src/gql/gql.ts | 356 + .../frontend/src/gql/graphql.ts | 3739 ++ .../frontend/src/gql/index.ts | 2 + .../frontend/src/graphql.ts | 81 + .../frontend/src/i18n.ts | 107 + .../frontend/src/i18n/password_changes.ts | 63 + .../frontend/src/pagination.ts | 142 + .../frontend/src/routeTree.gen.ts | 568 + .../frontend/src/router.tsx | 28 + .../frontend/src/routes/__root.tsx | 54 + .../frontend/src/routes/_account.index.tsx | 273 + .../src/routes/_account.plan.index.tsx | 126 + .../src/routes/_account.sessions.browsers.tsx | 192 + .../src/routes/_account.sessions.index.tsx | 227 + .../frontend/src/routes/_account.tsx | 75 + .../frontend/src/routes/clients.$id.tsx | 47 + .../frontend/src/routes/devices.$.tsx | 96 + .../frontend/src/routes/emails.$id.in-use.tsx | 56 + .../frontend/src/routes/emails.$id.verify.tsx | 228 + .../src/routes/password.change.index.tsx | 206 + .../src/routes/password.change.success.tsx | 36 + .../src/routes/password.recovery.index.tsx | 338 + 
.../routes/reset-cross-signing.cancelled.tsx | 30 + .../src/routes/reset-cross-signing.index.tsx | 163 + .../routes/reset-cross-signing.success.tsx | 33 + .../src/routes/reset-cross-signing.tsx | 49 + .../frontend/src/routes/sessions.$id.tsx | 100 + .../frontend/src/styles/cpd-button.css | 195 + .../src/styles/cpd-checkbox-control.css | 114 + .../frontend/src/styles/cpd-form.css | 121 + .../frontend/src/styles/cpd-link.css | 45 + .../frontend/src/styles/cpd-mfa-control.css | 104 + .../frontend/src/styles/cpd-text-control.css | 53 + .../frontend/src/test-utils/mockLocale.ts | 42 + .../frontend/src/test-utils/render.tsx | 25 + .../frontend/src/test-utils/router.tsx | 48 + .../frontend/src/utils/dates.ts | 13 + .../src/utils/deviceIdFromScope.test.ts | 29 + .../frontend/src/utils/deviceIdFromScope.ts | 21 + .../src/utils/password_complexity/enwiki.json | 30002 ++++++++++++++++ .../src/utils/password_complexity/index.ts | 190 + .../src/utils/password_complexity/namesf.json | 3714 ++ .../src/utils/password_complexity/namesm.json | 985 + .../src/utils/password_complexity/namess.json | 10002 ++++++ .../utils/password_complexity/passwords.json | 30002 ++++++++++++++++ .../utils/password_complexity/ustvfilm.json | 19162 ++++++++++ .../frontend/src/utils/simplifyUrl.ts | 33 + .../frontend/src/vite-env.d.ts | 7 + .../frontend/stories/routes/app.tsx | 37 + .../frontend/stories/routes/index.stories.tsx | 330 + .../routes/reset-cross-signing.stories.tsx | 73 + .../frontend/tailwind.config.cjs | 42 + .../frontend/tests/mocks/handlers.ts | 203 + .../reset-cross-signing.test.tsx.snap | 603 + .../account/__snapshots__/index.test.tsx.snap | 781 + .../tests/routes/account/index.test.tsx | 138 + .../frontend/tests/routes/render.tsx | 54 + .../tests/routes/reset-cross-signing.test.tsx | 109 + .../frontend/tests/routes/types.d.ts | 7 + .../frontend/tsconfig.json | 26 + .../frontend/tsconfig.node.json | 22 + .../frontend/vite.config.ts | 247 + .../frontend/vitest.global-setup.ts | 9 + 
.../frontend/vitest.setup.ts | 32 + matrix-authentication-service/localazy.json | 32 + .../misc/build-docs.sh | 58 + .../misc/device-code-grant.sh | 110 + .../misc/sqlx_update.sh | 40 + matrix-authentication-service/misc/update.sh | 25 + .../policies/.gitignore | 8 + .../policies/.regal/config.yaml | 14 + .../policies/Makefile | 68 + .../authorization_grant.rego | 172 + .../authorization_grant_test.rego | 256 + .../client_registration.rego | 207 + .../client_registration_test.rego | 427 + .../policies/common/common.rego | 89 + .../policies/common/common_test.rego | 49 + .../policies/compat_login/compat_login.rego | 74 + .../compat_login/compat_login_test.rego | 99 + .../policies/email/email.rego | 64 + .../policies/email/email_test.rego | 58 + .../policies/register/register.rego | 92 + .../policies/register/register_test.rego | 115 + .../schema/authorization_grant_input.json | 108 + .../schema/client_registration_input.json | 42 + .../policies/schema/compat_login_input.json | 144 + .../policies/schema/email_input.json | 41 + .../policies/schema/register_input.json | 58 + .../policies/util/coveralls.rego | 56 + .../templates/app.html | 33 + .../templates/base.html | 37 + .../templates/components/back_to_client.html | 37 + .../templates/components/button.html | 97 + .../templates/components/captcha.html | 41 + .../templates/components/errors.html | 23 + .../templates/components/field.html | 116 + .../templates/components/footer.html | 33 + .../templates/components/icon.html | 851 + .../templates/components/idp_brand.html | 53 + .../templates/components/logout.html | 23 + .../templates/components/scope.html | 60 + .../templates/device_name.txt | 28 + .../templates/emails/recovery.html | 52 + .../templates/emails/recovery.subject | 14 + .../templates/emails/recovery.txt | 16 + .../templates/emails/verification.html | 19 + .../templates/emails/verification.subject | 11 + .../templates/emails/verification.txt | 19 + .../templates/form_post.html | 32 + 
.../templates/pages/404.html | 27 + .../templates/pages/account/deactivated.html | 26 + .../templates/pages/account/locked.html | 26 + .../templates/pages/account/logged_out.html | 25 + .../pages/compat_login_policy_violation.html | 31 + .../templates/pages/consent.html | 84 + .../templates/pages/device_consent.html | 173 + .../templates/pages/device_link.html | 42 + .../templates/pages/error.html | 42 + .../templates/pages/index.html | 37 + .../templates/pages/login.html | 104 + .../templates/pages/policy_violation.html | 52 + .../templates/pages/reauth.html | 54 + .../templates/pages/recovery/consumed.html | 24 + .../templates/pages/recovery/disabled.html | 24 + .../templates/pages/recovery/expired.html | 32 + .../templates/pages/recovery/finish.html | 47 + .../templates/pages/recovery/progress.html | 37 + .../templates/pages/recovery/start.html | 40 + .../templates/pages/register/index.html | 66 + .../templates/pages/register/password.html | 82 + .../pages/register/steps/display_name.html | 52 + .../pages/register/steps/email_in_use.html | 30 + .../register/steps/registration_token.html | 44 + .../pages/register/steps/verify_email.html | 53 + .../templates/pages/sso.html | 59 + .../pages/upstream_oauth2/do_register.html | 194 + .../pages/upstream_oauth2/link_mismatch.html | 25 + .../pages/upstream_oauth2/suggest_link.html | 34 + .../templates/swagger/doc.html | 27 + .../templates/swagger/oauth2-redirect.html | 89 + .../translations/cs.json | 289 + .../translations/da.json | 281 + .../translations/de.json | 289 + .../translations/en.json | 802 + .../translations/et.json | 289 + .../translations/fi.json | 289 + .../translations/fr.json | 289 + .../translations/hu.json | 289 + .../translations/nb-NO.json | 289 + .../translations/nl.json | 84 + .../translations/pl.json | 289 + .../translations/pt.json | 279 + .../translations/ru.json | 289 + .../translations/sv.json | 252 + .../translations/uk.json | 289 + .../translations/zh-Hans.json | 281 + 1402 files changed, 
322616 insertions(+) create mode 100644 matrix-authentication-service/.cargo/config.toml create mode 100644 matrix-authentication-service/.codecov.yml create mode 100644 matrix-authentication-service/.config/nextest.toml create mode 100644 matrix-authentication-service/.dockerignore create mode 100644 matrix-authentication-service/.editorconfig create mode 100644 matrix-authentication-service/.github/CODEOWNERS create mode 100644 matrix-authentication-service/.github/actions/build-frontend/action.yml create mode 100644 matrix-authentication-service/.github/actions/build-policies/action.yml create mode 100644 matrix-authentication-service/.github/dependabot.yml create mode 100644 matrix-authentication-service/.github/release.yml create mode 100644 matrix-authentication-service/.github/scripts/.gitignore create mode 100644 matrix-authentication-service/.github/scripts/cleanup-pr.cjs create mode 100644 matrix-authentication-service/.github/scripts/commit-and-tag.cjs create mode 100644 matrix-authentication-service/.github/scripts/create-release-branch.cjs create mode 100644 matrix-authentication-service/.github/scripts/create-version-tag.cjs create mode 100644 matrix-authentication-service/.github/scripts/merge-back.cjs create mode 100644 matrix-authentication-service/.github/scripts/package.json create mode 100644 matrix-authentication-service/.github/scripts/update-release-branch.cjs create mode 100644 matrix-authentication-service/.github/scripts/update-unstable-tag.cjs create mode 100644 matrix-authentication-service/.github/workflows/build.yaml create mode 100644 matrix-authentication-service/.github/workflows/ci.yaml create mode 100644 matrix-authentication-service/.github/workflows/coverage.yaml create mode 100644 matrix-authentication-service/.github/workflows/docs.yaml create mode 100644 matrix-authentication-service/.github/workflows/merge-back.yaml create mode 100644 matrix-authentication-service/.github/workflows/release-branch.yaml create mode 100644 
matrix-authentication-service/.github/workflows/release-bump.yaml create mode 100644 matrix-authentication-service/.github/workflows/tag.yaml create mode 100644 matrix-authentication-service/.github/workflows/translations-download.yaml create mode 100644 matrix-authentication-service/.github/workflows/translations-upload.yaml create mode 100644 matrix-authentication-service/.gitignore create mode 100644 matrix-authentication-service/.rustfmt.toml create mode 100644 matrix-authentication-service/CONTRIBUTING.md create mode 100644 matrix-authentication-service/Cargo.lock create mode 100644 matrix-authentication-service/Cargo.toml create mode 100644 matrix-authentication-service/Dockerfile create mode 100644 matrix-authentication-service/LICENSE create mode 100644 matrix-authentication-service/LICENSE-COMMERCIAL create mode 100644 matrix-authentication-service/README.md create mode 100644 matrix-authentication-service/biome.json create mode 100644 matrix-authentication-service/book.toml create mode 100644 matrix-authentication-service/clippy.toml create mode 100644 matrix-authentication-service/crates/axum-utils/Cargo.toml create mode 100644 matrix-authentication-service/crates/axum-utils/src/client_authorization.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/cookies.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/csrf.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/error_wrapper.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/fancy_error.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/jwt.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/language_detection.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/lib.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/sentry.rs create mode 100644 matrix-authentication-service/crates/axum-utils/src/session.rs create mode 100644 
matrix-authentication-service/crates/axum-utils/src/user_authorization.rs create mode 100644 matrix-authentication-service/crates/cli/Cargo.toml create mode 100644 matrix-authentication-service/crates/cli/build.rs create mode 100644 matrix-authentication-service/crates/cli/src/app_state.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/config.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/database.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/debug.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/doctor.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/manage.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/mod.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/server.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/syn2mas.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/templates.rs create mode 100644 matrix-authentication-service/crates/cli/src/commands/worker.rs create mode 100644 matrix-authentication-service/crates/cli/src/lifecycle.rs create mode 100644 matrix-authentication-service/crates/cli/src/main.rs create mode 100644 matrix-authentication-service/crates/cli/src/server.rs create mode 100644 matrix-authentication-service/crates/cli/src/sync.rs create mode 100644 matrix-authentication-service/crates/cli/src/telemetry.rs create mode 100644 matrix-authentication-service/crates/cli/src/util.rs create mode 100644 matrix-authentication-service/crates/config/Cargo.toml create mode 100644 matrix-authentication-service/crates/config/src/bin/schema.rs create mode 100644 matrix-authentication-service/crates/config/src/lib.rs create mode 100644 matrix-authentication-service/crates/config/src/schema.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/account.rs create mode 100644 
matrix-authentication-service/crates/config/src/sections/branding.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/captcha.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/clients.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/database.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/email.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/experimental.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/http.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/matrix.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/mod.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/passwords.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/policy.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/rate_limiting.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/secrets.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/telemetry.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/templates.rs create mode 100644 matrix-authentication-service/crates/config/src/sections/upstream_oauth2.rs create mode 100644 matrix-authentication-service/crates/config/src/util.rs create mode 100644 matrix-authentication-service/crates/context/Cargo.toml create mode 100644 matrix-authentication-service/crates/context/src/fmt.rs create mode 100644 matrix-authentication-service/crates/context/src/future.rs create mode 100644 matrix-authentication-service/crates/context/src/layer.rs create mode 100644 matrix-authentication-service/crates/context/src/lib.rs create mode 100644 matrix-authentication-service/crates/context/src/service.rs create mode 100644 
matrix-authentication-service/crates/data-model/Cargo.toml create mode 100644 matrix-authentication-service/crates/data-model/examples/ua-parser.rs create mode 100644 matrix-authentication-service/crates/data-model/src/clock.rs create mode 100644 matrix-authentication-service/crates/data-model/src/compat/device.rs create mode 100644 matrix-authentication-service/crates/data-model/src/compat/mod.rs create mode 100644 matrix-authentication-service/crates/data-model/src/compat/session.rs create mode 100644 matrix-authentication-service/crates/data-model/src/compat/sso_login.rs create mode 100644 matrix-authentication-service/crates/data-model/src/lib.rs create mode 100644 matrix-authentication-service/crates/data-model/src/oauth2/authorization_grant.rs create mode 100644 matrix-authentication-service/crates/data-model/src/oauth2/client.rs create mode 100644 matrix-authentication-service/crates/data-model/src/oauth2/device_code_grant.rs create mode 100644 matrix-authentication-service/crates/data-model/src/oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/data-model/src/oauth2/session.rs create mode 100644 matrix-authentication-service/crates/data-model/src/personal/mod.rs create mode 100644 matrix-authentication-service/crates/data-model/src/personal/session.rs create mode 100644 matrix-authentication-service/crates/data-model/src/policy_data.rs create mode 100644 matrix-authentication-service/crates/data-model/src/site_config.rs create mode 100644 matrix-authentication-service/crates/data-model/src/tokens.rs create mode 100644 matrix-authentication-service/crates/data-model/src/upstream_oauth2/link.rs create mode 100644 matrix-authentication-service/crates/data-model/src/upstream_oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/data-model/src/upstream_oauth2/provider.rs create mode 100644 matrix-authentication-service/crates/data-model/src/upstream_oauth2/session.rs create mode 100644 
matrix-authentication-service/crates/data-model/src/user_agent.rs create mode 100644 matrix-authentication-service/crates/data-model/src/users.rs create mode 100644 matrix-authentication-service/crates/data-model/src/utils.rs create mode 100644 matrix-authentication-service/crates/data-model/src/version.rs create mode 100644 matrix-authentication-service/crates/email/Cargo.toml create mode 100644 matrix-authentication-service/crates/email/src/lib.rs create mode 100644 matrix-authentication-service/crates/email/src/mailer.rs create mode 100644 matrix-authentication-service/crates/email/src/transport.rs create mode 100644 matrix-authentication-service/crates/handlers/Cargo.toml create mode 100644 matrix-authentication-service/crates/handlers/src/activity_tracker/bound.rs create mode 100644 matrix-authentication-service/crates/handlers/src/activity_tracker/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/activity_tracker/worker.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/call_context.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/model.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/params.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/response.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/schema.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/finish.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/mod.rs create mode 100644 
matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/finish.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/add.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/regenerate.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/revoke.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get_latest.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/set.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/site_config.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/add.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/list.rs create mode 100644 
matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/add.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/delete.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/add.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/revoke.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/unrevoke.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/update.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/finish.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/list.rs create 
mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/add.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/by_username.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/deactivate.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/get.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/list.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/lock.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/reactivate.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/set_admin.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/set_password.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/users/unlock.rs create mode 100644 matrix-authentication-service/crates/handlers/src/admin/v1/version.rs create mode 100644 matrix-authentication-service/crates/handlers/src/bin/api-schema.rs create mode 100644 matrix-authentication-service/crates/handlers/src/bin/graphql-schema.rs create mode 100644 matrix-authentication-service/crates/handlers/src/captcha.rs create mode 100644 matrix-authentication-service/crates/handlers/src/cleanup_tests.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/login.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/login_sso_complete.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/login_sso_redirect.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/logout.rs create mode 100644 
matrix-authentication-service/crates/handlers/src/compat/logout_all.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/refresh.rs create mode 100644 matrix-authentication-service/crates/handlers/src/compat/tests.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/browser_sessions.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/compat_sessions.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/cursor.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/matrix.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/node.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/oauth.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/site_config.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/upstream_oauth.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/users.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/viewer/anonymous.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/model/viewer/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/browser_session.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/compat_session.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/matrix.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/mod.rs create mode 100644 
matrix-authentication-service/crates/handlers/src/graphql/mutations/oauth2_session.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/user.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/mutations/user_email.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/query/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/query/session.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/query/upstream_oauth.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/query/user.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/query/viewer.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/state.rs create mode 100644 matrix-authentication-service/crates/handlers/src/graphql/tests.rs create mode 100644 matrix-authentication-service/crates/handlers/src/health.rs create mode 100644 matrix-authentication-service/crates/handlers/src/lib.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/authorization/callback.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/authorization/consent.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/authorization/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/device/authorize.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/device/consent.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/device/link.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/device/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/discovery.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/introspection.rs create mode 100644 
matrix-authentication-service/crates/handlers/src/oauth2/keys.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/registration.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/revoke.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/token.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/userinfo.rs create mode 100644 matrix-authentication-service/crates/handlers/src/oauth2/webfinger.rs create mode 100644 matrix-authentication-service/crates/handlers/src/passwords.rs create mode 100644 matrix-authentication-service/crates/handlers/src/preferred_language.rs create mode 100644 matrix-authentication-service/crates/handlers/src/rate_limit.rs create mode 100644 matrix-authentication-service/crates/handlers/src/session.rs create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-2.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-3.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id-2.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt-2.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt.snap create mode 100644 
matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2-2.snap create mode 100644 matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2.snap create mode 100644 matrix-authentication-service/crates/handlers/src/test_utils.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/authorize.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/backchannel_logout.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/cache.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/callback.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/cookie.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/link.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/upstream_oauth2/template.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/app.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/index.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/login.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/logout.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/recovery/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/recovery/progress.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/recovery/start.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/cookie.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/mod.rs create mode 
100644 matrix-authentication-service/crates/handlers/src/views/register/password.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/steps/display_name.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/steps/finish.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/steps/mod.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/steps/registration_token.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/register/steps/verify_email.rs create mode 100644 matrix-authentication-service/crates/handlers/src/views/shared.rs create mode 100644 matrix-authentication-service/crates/http/Cargo.toml create mode 100644 matrix-authentication-service/crates/http/src/ext.rs create mode 100644 matrix-authentication-service/crates/http/src/lib.rs create mode 100644 matrix-authentication-service/crates/http/src/reqwest.rs create mode 100644 matrix-authentication-service/crates/i18n-scan/Cargo.toml create mode 100644 matrix-authentication-service/crates/i18n-scan/src/key.rs create mode 100644 matrix-authentication-service/crates/i18n-scan/src/main.rs create mode 100644 matrix-authentication-service/crates/i18n-scan/src/minijinja.rs create mode 100644 matrix-authentication-service/crates/i18n/Cargo.toml create mode 100644 matrix-authentication-service/crates/i18n/src/lib.rs create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/argument.rs create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/formatter.rs create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/grammar.pest create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/message.rs create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/mod.rs create mode 100644 matrix-authentication-service/crates/i18n/src/sprintf/parser.rs create mode 100644 
matrix-authentication-service/crates/i18n/src/translations.rs create mode 100644 matrix-authentication-service/crates/i18n/src/translator.rs create mode 100644 matrix-authentication-service/crates/i18n/test_data/en-US.json create mode 100644 matrix-authentication-service/crates/i18n/test_data/en.json create mode 100644 matrix-authentication-service/crates/i18n/test_data/fr.json create mode 100644 matrix-authentication-service/crates/iana-codegen/Cargo.toml create mode 100644 matrix-authentication-service/crates/iana-codegen/src/generation.rs create mode 100644 matrix-authentication-service/crates/iana-codegen/src/jose.rs create mode 100644 matrix-authentication-service/crates/iana-codegen/src/main.rs create mode 100644 matrix-authentication-service/crates/iana-codegen/src/oauth.rs create mode 100644 matrix-authentication-service/crates/iana-codegen/src/traits.rs create mode 100644 matrix-authentication-service/crates/iana/Cargo.toml create mode 100644 matrix-authentication-service/crates/iana/src/jose.rs create mode 100644 matrix-authentication-service/crates/iana/src/lib.rs create mode 100644 matrix-authentication-service/crates/iana/src/oauth.rs create mode 100644 matrix-authentication-service/crates/jose/Cargo.toml create mode 100644 matrix-authentication-service/crates/jose/src/base64.rs create mode 100644 matrix-authentication-service/crates/jose/src/claims.rs create mode 100644 matrix-authentication-service/crates/jose/src/constraints.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwa/asymmetric.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwa/hmac.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwa/mod.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwa/signature.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwa/symmetric.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwk/mod.rs create mode 100644 
matrix-authentication-service/crates/jose/src/jwk/private_parameters.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwk/public_parameters.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwt/header.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwt/mod.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwt/raw.rs create mode 100644 matrix-authentication-service/crates/jose/src/jwt/signed.rs create mode 100644 matrix-authentication-service/crates/jose/src/lib.rs create mode 100644 matrix-authentication-service/crates/jose/tests/generate.py create mode 100644 matrix-authentication-service/crates/jose/tests/jws.rs create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed25519.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed448.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/es256.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/es256k.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/es384.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/es512.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/hs256.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/hs384.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/hs512.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/ps256.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/ps384.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/ps512.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/rs256.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/rs384.jwt create mode 100644 matrix-authentication-service/crates/jose/tests/jwts/rs512.jwt create mode 100644 
matrix-authentication-service/crates/jose/tests/keys/ed25519.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/ed25519.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/ed448.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/ed448.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/jwks.priv.json create mode 100644 matrix-authentication-service/crates/jose/tests/keys/jwks.pub.json create mode 100644 matrix-authentication-service/crates/jose/tests/keys/k256.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/k256.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/oct.bin create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p256.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p256.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p384.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p384.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p521.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/p521.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/rsa.priv.pem create mode 100644 matrix-authentication-service/crates/jose/tests/keys/rsa.pub.pem create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__es256__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__es256k__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__es384__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__ps256__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__ps384__sign_jwt.snap create mode 100644 
matrix-authentication-service/crates/jose/tests/snapshots/jws__ps512__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__rs256__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__rs384__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/jose/tests/snapshots/jws__rs512__sign_jwt.snap create mode 100644 matrix-authentication-service/crates/keystore/Cargo.toml create mode 100644 matrix-authentication-service/crates/keystore/src/encrypter.rs create mode 100644 matrix-authentication-service/crates/keystore/src/lib.rs create mode 100644 matrix-authentication-service/crates/keystore/tests/generate.sh create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.encrypted.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.encrypted.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.sec1.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-k256.sec1.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.sec1.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p256.sec1.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.der create mode 100644 
matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p384.sec1.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec-p384.sec1.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/ec256.pkcs8.encrypted.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs1.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs1.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.encrypted.der create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.encrypted.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.pem create mode 100644 matrix-authentication-service/crates/keystore/tests/keystore.rs create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-2.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-3.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-4.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-5.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256.snap create mode 100644 
matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256K.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES384.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS256.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS384.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS512.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS256.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS384.snap create mode 100644 matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS512.snap create mode 100644 matrix-authentication-service/crates/listener/Cargo.toml create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/ca-key.pem create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/ca.csr create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/ca.json create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/ca.pem create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/client-key.pem create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/client.csr create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/client.json create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/client.pem create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/config.json create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/gen.sh create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/server-key.pem create mode 100644 
matrix-authentication-service/crates/listener/examples/demo/certs/server.csr create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/server.json create mode 100644 matrix-authentication-service/crates/listener/examples/demo/certs/server.pem create mode 100644 matrix-authentication-service/crates/listener/examples/demo/main.rs create mode 100644 matrix-authentication-service/crates/listener/src/lib.rs create mode 100644 matrix-authentication-service/crates/listener/src/maybe_tls.rs create mode 100644 matrix-authentication-service/crates/listener/src/proxy_protocol/acceptor.rs create mode 100644 matrix-authentication-service/crates/listener/src/proxy_protocol/maybe.rs create mode 100644 matrix-authentication-service/crates/listener/src/proxy_protocol/mod.rs create mode 100644 matrix-authentication-service/crates/listener/src/proxy_protocol/v1.rs create mode 100644 matrix-authentication-service/crates/listener/src/rewind.rs create mode 100644 matrix-authentication-service/crates/listener/src/server.rs create mode 100644 matrix-authentication-service/crates/listener/src/unix_or_tcp.rs create mode 100644 matrix-authentication-service/crates/matrix-synapse/Cargo.toml create mode 100644 matrix-authentication-service/crates/matrix-synapse/src/error.rs create mode 100644 matrix-authentication-service/crates/matrix-synapse/src/legacy.rs create mode 100644 matrix-authentication-service/crates/matrix-synapse/src/lib.rs create mode 100644 matrix-authentication-service/crates/matrix-synapse/src/modern.rs create mode 100644 matrix-authentication-service/crates/matrix/Cargo.toml create mode 100644 matrix-authentication-service/crates/matrix/src/lib.rs create mode 100644 matrix-authentication-service/crates/matrix/src/mock.rs create mode 100644 matrix-authentication-service/crates/matrix/src/readonly.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/Cargo.toml create mode 100644 
matrix-authentication-service/crates/oauth2-types/src/errors.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/lib.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/oidc.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/pkce.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/registration/client_metadata_serde.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/registration/mod.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/requests.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/response_type.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/scope.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/test_utils.rs create mode 100644 matrix-authentication-service/crates/oauth2-types/src/webfinger.rs create mode 100644 matrix-authentication-service/crates/oidc-client/Cargo.toml create mode 100644 matrix-authentication-service/crates/oidc-client/src/error.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/lib.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/authorization_code.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/client_credentials.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/discovery.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/jose.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/mod.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/refresh_token.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/token.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/requests/userinfo.rs create mode 100644 
matrix-authentication-service/crates/oidc-client/src/types/client_credentials.rs create mode 100644 matrix-authentication-service/crates/oidc-client/src/types/mod.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/main.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/authorization_code.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/client_credentials.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/discovery.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/jose.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/mod.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/refresh_token.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/requests/userinfo.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/types/client_credentials.rs create mode 100644 matrix-authentication-service/crates/oidc-client/tests/it/types/mod.rs create mode 100644 matrix-authentication-service/crates/policy/Cargo.toml create mode 100644 matrix-authentication-service/crates/policy/src/bin/schema.rs create mode 100644 matrix-authentication-service/crates/policy/src/lib.rs create mode 100644 matrix-authentication-service/crates/policy/src/model.rs create mode 100644 matrix-authentication-service/crates/router/Cargo.toml create mode 100644 matrix-authentication-service/crates/router/src/endpoints.rs create mode 100644 matrix-authentication-service/crates/router/src/lib.rs create mode 100644 matrix-authentication-service/crates/router/src/traits.rs create mode 100644 matrix-authentication-service/crates/router/src/url_builder.rs create mode 100644 matrix-authentication-service/crates/spa/Cargo.toml create mode 100644 matrix-authentication-service/crates/spa/src/lib.rs create mode 
100644 matrix-authentication-service/crates/spa/src/vite.rs create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-015f7ad7c8d5403ce4dfb71d598fd9af472689d5aef7c1c4b1c594ca57c02237.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-037fae6964130343453ef607791c4c3deaa01b5aaa091d3a3487caf3e2634daf.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-03eee34f05df9c79f8ca5bfb1af339b3fcea95ba59395106318366a6ef432d85.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-047990a99794b565c2cad396946299db5b617f52f6c24bcca0a24c0c185c4478.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-048eec775f4af3ffd805e830e8286c6a5745e523b76e1083d6bfced0035c2f76.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-05b4dd39521eaf4e8e3c21654df67c00c8781f54054a84b3f3005b65cbc2a14a.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-06d67595eeef23d5f2773632e0956577d98074e244a35c0d3be24bc18d9d0daa.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-07cd2da428f0984513b4ce58e526c35c9c236ea8beb6696e5740fa45655e59f3.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-093d42238578771b4183b48c1680ba438b6b18306dfe1454fa4124c0207b3deb.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-0e1bce56e15751d82a622d532b279bfc50e22cb12ddf7495c7b0fedca61f9421.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-0e45995714e60b71e0f0158500a63aa46225245a04d1c7bc24b5275c44a6d58d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-109f0c859e123966462f1001aef550e4e12d1778474aba72762d9aa093d21ee2.json 
create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-12c4577701416a9dc23708c46700f3f086e4e62c6de9d6864a6a11a2470ebe62.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-1764715e59f879f6b917ca30f8e3c1de5910c7a46e7fe52d1fb3bfd5561ac320.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-188a4aeef5a8b4bf3230c7176ded64d52804848df378dc74f8f54ec4404e094e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-1919d402fd6f148d14417f633be3353004f458c85f7b4f361802f86651900fbc.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-1b547552eed4128f2227c681ff2d45586cdb0c20b98393f89036fbf0f1d2dee2.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-1dbc50cdab36da307c569891ab7b1ab4aaf128fed6be67ca0f139d697614c63b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-1eb829460407fca22b717b88a1a0a9b7b920d807a4b6c235e1bee524cd73b266.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-21b9e39ffd89de288305765c339a991d2471667cf2981770447cde6fd025fbb7.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-22896e8f2a002f307089c3e0f9ee561e6521c45ce07d3a42411984c9a6b75fdc.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-23d5fcd8bf611dc7279bef0d66ce05461c3c1f43f966fee3a80ae42540783f08.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-245cab1cf7d9cf4e94cdec91ecb4dc8e678278121efbe1f66bcdc24144d684d0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2564bf6366eb59268c41fb25bb40d0e4e9e1fd1f9ea53b7a359c9025d7304223.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-29148548d592046f7d711676911e3847e376e443ccd841f76b17a81f53fafc3a.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-2a0d8d70d21afa9a2c9c1c432853361bb85911c48f7db6c3873b0f5abf35940b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2a61003da3655158e6a261d91fdff670f1b4ba3c56605c53e2b905d7ec38c8be.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2d249684e0e4db0e3bc189f821521657559d9b77fd931f972ce4d9f03a57f97a.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2ee26886c56f04cd53d4c0968f5cf0963f92b6d15e6af0e69378a6447dee677c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2f66991d7b9ba58f011d9aef0eb6a38f3b244c2f46444c0ab345de7feff54aba.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2f7aba76cd7df75d6a9a6d91d5ddebaedf37437f3bd4f796f5581fab997587d7.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-2f8d402b7217aef47a5c45d4f7cfddbaeedcbbc6963ee573409bfc98e57de6ed.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-31e8bf68ff70a436fd0b6787ac8e2777f9327708b450d048638a162343478cc6.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-38d0608b7d8ba30927f939491c1d43cfd962c729298ad07ee1ade2f2880c0eb3.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-38eb6b635d30ca78ff78b926b414cbd866cfc2918ca4b1741b5687f21cfe273b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-399e261027fe6c9167511636157ab747a469404533f59ff6fbd56e9eb5ad38e1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7960a2eb2edd71bc71177fc0fb2e83858c9944893b8f3a0f0131e8a9b7a494.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7fc3e386ce51187f6344ad65e1d78a7f026e8311bdc7d5ccc2f39d962e898f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3d66f3121b11ce923b9c60609b510a8ca899640e78cc8f5b03168622928ffe94.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3e6e3aad53b22fc53eb3ee881b29bb249b18ced57d6a4809dffc23972b3e9423.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3ed73cfce8ef6a1108f454e18b1668f64b76975dba07e67d04ed7a52e2e8107f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-3f9d76f442c82a1631da931950b83b80c9620e1825ab07ab6c52f3f1a32d2527.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-432e199b0d47fe299d840c91159726c0a4f89f65b4dc3e33ddad58aabf6b148b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-446a8d7bd8532a751810401adfab924dc20785c91770ed43d62df2e590e8da71.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-45d7e962d91fcdcf8284d81d04bc0737c0d20799b497089a566e2ff704d56b67.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-494ca16f0f00f977a3031924a15318aa7346917e5c8a37bb0f5b2b3067588009.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-4968c60adef69c7215a7efe2021baffb050b2f475ae106155c2e2f210a81191a.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-4c2064fed8fa464ea3d2a1258fb0544dbf1493cad31a21c0cd7ddb57ed12de16.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-4c37988dacca5a83c8b64209042d5f1a8ec44ec8ccccad2d7fce9ac855209883.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-4d0386ad2fe47f1aded46917abe6141752ba90d36467693a68318573171d57b0.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-4dad1838536c10ba723adc0fb6da0f24afb3d6a1925a80a1b6d35b9a8258a0ce.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-4e64540bbffe5f4b9c4a6589012cf69eb67adaa4d40fc1910dfcd2640e32ab37.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5006c3e60c98c91a0b0fbb3205373e81d9b75e90929af80961f8b5910873a43e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5133f9c5ba06201433be4ec784034d222975d084d0a9ebe7f1b6b865ab2e09ef.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-535225206622b9190ccf42f7d66268818dc84c37b168ab45e582e0a727796a06.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-53ad718642644b47a2d49f768d81bd993088526923769a9147281686c2d47591.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5402b8ddb674d05319830477eb3e72ecb536092b46c92a7dda01598962842323.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-55bc51efddf7a1cf06610fdb20d46beca29964733338ea4fec2a29393f031c4f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-572ead41d62cfbe40e6f0c8edf6928e8eebd99036255b62d688ac02b5bd74b40.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5a6b91660e4c12b4a1fe2cad08e727a305cbe4029cd4cebd5ecc274e3e32f533.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5b21644dd3c094b0f2f8babb2c730554dc067d0a6cad963dd7e0c66a80b342bf.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5b697dd7834d33ec55972d3ba43d25fe794bc0b69c5938275711faa7a80b811f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5d0d4699aa82b3976c6c1fcb0d77559da26def223b8954cf32959cce777577d7.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-5da7a197e0008f100ad4daa78f4aa6515f0fc9eb54075e8d6d15520d25b75172.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5eea2f4c3e82ae606b09b8a81332594c97ba0afe972f0fee145b6094789fb6c7.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5f2199865fae3a969bb37429dd70dc74505b22c681322bd99b62c2a540c6cd35.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-5fe1bb569d13a7d3ff22887b3fc5b76ff901c183b314f8ccb5018d70c516abf6.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-607262ccf28b672df51e4e5d371e5cc5119a7d6e7fe784112703c0406f28300f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-608366f45ecaf392ab69cddb12252b5efcc103c3383fa68b552295e2289d1f55.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-623097fc45ffa5d6e09fedfbdbe5e42662e9854430bcd9e53598debf99c9ca37.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-64b6e274e2bed6814f5ae41ddf57093589f7d1b2b8458521b635546b8012041e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-66693f31eff5673e88ca516ee727a709b06455e08b9fd75cc08f142070f330b3.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-67cd4880d84b38f20c3960789934d55cbfb01492985ac2af5a1ad4af9b3ccc77.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6b8d28b76d7ab33178b46dbb28c11e41d86f22b3fa899a952cad00129e59bee6.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6bd38759f569fcf972924d12f565b531b9873f4139eadcbf1450e726b9a27379.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-6d71188dffc492ddc8f7f21476516d3b08fd5d736ecf36845e6fd4bfc515b2cf.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6db23fc9c39c2c7d9224d4e1233205f636568c990ccb05cf9208750ad1330b9b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6e21e7d816f806da9bb5176931bdb550dee05c44c9d93f53df95fe3b4a840347.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6ecad60e565367a6cfa539b4c32dabe674ea853e0d47eb5c713705cb0130c758.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-6f97b5f9ad0d4d15387150bea3839fb7f81015f7ceef61ecaadba64521895cff.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-707d78340069627aba9f18bbe5ac1388d6723f82549d88d704d9c939b9d35c49.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7189b6136fd08ac9ae7c51bff06fb2254d1bf9e8a97cd7d32ba789c740e0fbdb.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-755f62d0a3a40acc90037371339a8459736fdd4bbffd932f7930d847f2c3ef5d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-75a62d170e4c959a14c5698f1da983113e7d1bc565d01e85c158856abb17ddc6.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-77dfa9fae1a9c77b70476d7da19d3313a02886994cfff0690451229fb5ae2f77.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-785e6bceed803cb1caccc373cde0c999d601f3a9730e6bbb40cfc43c04195c61.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7a0641df5058927c5cd67d4cdaa59fe609112afbabcbfcc0e7f96c1e531b6567.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7b06e6f21c69056b526538f06f06268efd13d7af3cecb452168d514a379fec30.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-7ce387b1b0aaf10e72adde667b19521b66eaafa51f73bf2f95e38b8f3b64a229.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7e367e416d18fcf9b227bf053421410b4b7b4af441f0a138c5421d1111cb9f79.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7e414c29745cf5c85fa4e7cb5d661b07f43ab168956470d120166ed7eab631d9.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7f4c4634ada4dc2745530dcca8eee92abf78dfbdf1a25e58a2bc9c14be8035f0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-7f8335cc94347bc3a15afe7051658659347a1bf71dd62335df046708f19c967e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8275a440640ea28fd8f82e7df672e45a6eba981a0d621665ed8f8b60354b3389.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-83d1b0720dfde3209d77f1142aa19359913b8a934ca8a642b7bb43c9a7a58a6d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-860e01cd660b450439d63c5ee31ade59f478b0b096b4bc90c89fb9c26b467dd2.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-875294dc5cf87bcf302fb9e87933745cc1c57bbe3c3c69110592a07400116c7f.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-89041298e272d15c21e2b7127bd16c5a4f48e2be87dc26e9d0e3a932c9c49dfb.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-89edaec8661e435c3b71bb9b995cd711eb78a4d39608e897432d6124cd135938.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8acbdc892d44efb53529da1c2df65bea6b799a43cf4c9264a37d392847e6eff0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8afada5220fefb0d01ed6f87d3d0ee8fca86b5cdce9320e190e3d3b8fd9f63bc.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-8d240d72d651f59d53bed7380710038e9d00492b1e282237c0ec0e03bc36a9c0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef27901b96b73826a431ad6c5fabecc18c36d8cdba8db3b47953855fa5c9035.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef977487429f84c557dc62272c47e411b96b2376288a90c242034295e1a147e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8f4f071f844281fb14ecd99db3261540441b14c8206038fdc4a4336bbae3f382.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-8f5ce493e8b8473ba03d5263915a8b231f9e7c211ab83487536008e48316c269.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-90875bdd2f75cdf0dc3f48dc2516f5c701411387c939f6b8a3478b41b3de4f20.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-90fe32cb9c88a262a682c0db700fef7d69d6ce0be1f930d9f16c50b921a8b819.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-91a3ee5ad64a947b7807a590f6b014c6856229918b972b98946f98b75686ab6c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-926cb81dc7931890a02c5a372aef79832e5d0748dad18ab44c6671f3196d6f60.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-92c8eb526fcc5de6874eb0fab1d71fb1ed3dafe2bd1a49aa72e4f4862931c6c2.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-933d2bed9c00eb9b37bfe757266ead15df5e0a4209ff47dcf4a5f19d35154e89.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-966ca0f7eebd2896c007b2fd6e9327d03b29fe413d57cce21c67b6d539f59e7d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-98a5491eb5f10997ac1f3718c835903ac99d9bb8ca4d79c908b25a6d1209b9b1.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9b7363000017fa3dee46441bc0679cb16f9f8df08fa258cc907007fb9bcd0bc7.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9c9c65d4ca6847761d8f999253590082672b3782875cf3f5ba0b2f9d26e3a507.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9e8152d445f9996b221ad3690ba982ad01035296bf4539ca5620a043924a7292.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9eaf35f045aaca8473efc4a1f529afe24f01d9ec34609f373db5c535ccb58516.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9f7bdc034c618e47e49c467d0d7f5b8c297d055abe248cc876dbc12c5a7dc920.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-9fe87eeaf4b7d0ba09b59ddad3476eb57ccb6e4053ab8f4450dd4a9d1f6ba108.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a0be6c56e470382b9470df414497e260ba8911123744980e24a52bc9b95bd056.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a2f7433f06fb4f6a7ad5ac6c1db18705276bce41e9b19d5d7e910ad4b767fb5e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a50eb326c3522f971f6ee7e13dff61efbeb1ec24e2c694e1673347bae993762d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a63a217981b97448ddcc96b2489ddd9d3bc8c99b5b8b1d373939fc3ae9715c27.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a7094d84d313602729fde155cfbe63041fca7cbab407f98452462ec45e3cfd16.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-a75a6a08c9639053cfc3cffa9d4a009785f358b334f5c586c2e358f0d0b4d856.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-a7f780528882a2ae66c45435215763eed0582264861436eab3f862e3eb12cab1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-ab34912b42a48a8b5c8d63e271b99b7d0b690a2471873c6654b1b6cf2079b95c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-ae6bf8958c4d9837d63f56574e91f91acc6076a8521adc3e30a83bf70e2121a0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-afa86e79e3de2a83265cb0db8549d378a2f11b2a27bbd86d60558318c87eb698.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b3568613352efae1125a88565d886157d96866f7ef9b09b03a45ba4322664bd0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b60d34f4d250c12f75dba10491c1337d69aebad12be6fbfbdde91e34083ba4ed.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b6c4f4a23968cba2a82c2b7cfffc05a7ed582c9e5c1f65d27b0686f843ccfe42.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b700dc3f7d0f86f4904725d8357e34b7e457f857ed37c467c314142877fd5367.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b74e4d620bed4832a4e8e713a346691f260a7eca4bf494d6fb11c7cf699adaad.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b91cc2458e1a530e7cadbd1ca3e2eaf93e1c44108b6770a24c9a24ac29db37d3.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-b992283a9b43cbb8f86149f3f55cb47fb628dabd8fadc50e6a5772903f851e1c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-bb0f782756c274c06c1b63af6fc3ac2a7cedfd4247b57f062d348b4b1b36bef1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-bb141d28c0c82244f31d542038c314d05ceb3a7b8f35397c0faef3b36d2d14a7.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-bbf62633c561706a762089bbab2f76a9ba3e2ed3539ef16accb601fb609c2ec9.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-c09e0bb0378d9dfb15de7f2f1209fab6ea87589819128e6fc9ed5da11dfc2770.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-c29fa41743811a6ac3a9b952b6ea75d18e914f823902587b63c9f295407144b1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-c5e7dbb22488aca427b85b3415bd1f1a1766ff865f2e08a5daa095d2a1ccbd56.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-c960f4f5571ee68816c49898125979f3c78c2caca52cb4b8dc9880e669a1f23e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-c984ae0496d0bd7520ee3d6761ce6a4f61a6a2001b597e4c63ba4588ec5cf530.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-ca093cab5143bb3dded2eda9e82473215f4d3c549ea2c5a4f860a102cc46a667.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-cc60ad934d347fb4546205d1fe07e9d2f127cb15b1bb650d1ea3805a4c55b196.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-ce36eb8d3e4478a4e8520919ff41f1a5e6470cef581b1638f5578546dd28c4df.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-cf2eeca6d8dbc2cc72160a26e81f6e963096edb610183ba13cbbbd3d95c4134b.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-cf654533cfed946e9ac52dbcea1f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d02248136aa6b27636814dee4e0bc38395ab6c6fdf979616fa16fc490897cee3.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d0355d4e98bec6120f17d8cf81ac8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-d26e42d9fd2b2ee3cf9702c1666d83e7cffa26b320ae1442c7f3e22376c4a4ee.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d4bc51c30f1119ea9d117fb565ec554d63c8773040679a77e99ac3fa24cec71d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d7a0e4fa2f168976505405c7e7800847f3379f7b57c0972659a35bfb68b0f6cd.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d8f0b02952e786dd4309eac9de04a359aea3a46e5d4e07764cec56ce5d6609c0.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-d95cd1b4bcfa1d7bb236d49e1956fcc9a684609956972fe4f95aac13f30b2530.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-da02f93d7346992a9795f12b900f91ac0b326dd751c0d374d6ef4d19f671d22e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-dbf4be84eeff9ea51b00185faae2d453ab449017ed492bf6711dc7fceb630880.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-dca9b361c4409b14498b85f192b0034201575a49e0240ac6715b55ad8d381d0e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-dd02cc4a48123c28b34da8501060096c33df9e30611ef89d01bf0502119cbbe1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-dda97742d389ffeeaab33d352d05767e2150f7da3cf384a7f44741c769f44144.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e02ea83d195cb58fa8525e66a6ac1dddae3f1dfb1ef48494f6aee3fd03abe6f6.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e1746b33c2f0d10f26332195f78e1ef2f192ca66f8000d1385626154e5ce4f7e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e291be0434ab9c346dee777e50f8e601f12c8003fe93a5ecb110d02642d14c3c.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-e35d56de7136d43d0803ec825b0612e4185cef838f105d66f18cb24865e45140.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e68a7084d44462d19f30902d7e6c1bd60bb771c6f075df15ab0137a7ffc896da.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e8e48db74ac1ab5baa1e4b121643cfa33a0bf3328df6e869464fe7f31429b81e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-e99ab37ab3e03ad9c48792772b09bac77b09f67e623d5371ab4dadbe2d41fa1c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-eb095f64bec5ac885683a8c6708320760971317c4519fae7af9d44e2be50985d.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f0b4af5a9d6f1cc707a935fd5f34526a54ebbed8eef8f885f3a6723bc8490908.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f41f76c94cd68fca2285b1cc60f426603c84df4ef1c6ce5dc441a63d2dc46f6e.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f46e87bbb149b35e1d13b2b3cd2bdeab3c28a56a395f52f001a7bb013a5dfece.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f50b7fb5a2c09e7b7e89e2addb0ca42c790c101a3fc9442862b5885d5116325a.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f5c2ec9b7038d7ed36091e670f9bf34f8aa9ea8ed50929731845e32dc3176e39.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-f8182fd162ffb018d4f102fa7ddbc9991135065e81af8f77b5beef9405607577.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fbf926f630df5d588df4f1c9c0dc0f594332be5829d5d7c6b66183ac25b3d166.json create mode 100644 
matrix-authentication-service/crates/storage-pg/.sqlx/query-fc9925e19000d79c0bb020ea44e13cbb364b3505626d34550e38f6f7397b9d42.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fca331753aeccddbad96d06fc9d066dcefebe978a7af477bb6b55faa1d31e9b1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fd32368fa6cd16a9704cdea54f7729681d450669563dd1178c492ffce51e5ff2.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-fe7bd146523e4bb321cb234d6bf9f3005b55c654897a8e46dc933c7fd2263c7c.json create mode 100644 matrix-authentication-service/crates/storage-pg/.sqlx/query-ffbfef8b7e72ec4bae02b6bbe862980b5fe575ae8432a000e9c4e4307caa2d9b.json create mode 100644 matrix-authentication-service/crates/storage-pg/Cargo.toml create mode 100644 matrix-authentication-service/crates/storage-pg/build.rs create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20220530084123_jobs_workers.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20221018142001_init.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20221121151402_upstream_oauth.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20221213145242_password_schemes.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230408234928_add_get_jobs_fn_.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230616093555_compat_admin_flag.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230621140528_upstream_oauth_claims_imports.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20230626130338_oauth_clients_static.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230728154304_user_lock.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230823125247_drop_apalis_push_job.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230828085439_oauth2_clients_more_fields.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230828143553_user_session_authentication_source.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230829092920_oauth2_sessions_user_id_scope_list.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230829141928_user_session_user_agent.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230904135550_oauth2_client_credentials_grant.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230911091636_oauth2_token_expiration.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20230919155444_record_session_last_activity.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20231009142904_user_can_request_admin.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20231116104353_upstream_oauth_overrides.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20231120110559_upstream_oauth_branding.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20231207090532_oauth_device_code_grant.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20231208155602_oauth_clients_device_code_grant.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240207100003_user_terms.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20240220141353_nonunique_compat_device_id.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240220150201_compat_sessions_user_sessions_link.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240221164945_sessions_user_agent.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240301091201_upstream_oauth_additional_parameters.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240402084854_upstream_oauth_disabled_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240621080509_user_recovery.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20240718075125_sessions_active_index.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241004075132_queue_worker.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241004121132_queue_job.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241007160050_oidc_login_hint.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241115163340_upstream_oauth2_response_mode.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241118115314_upstream_oauth2_extra_query_params.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241120163320_queue_job_failures.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241122130349_queue_job_scheduled.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241122133435_queue_job_scheduled_index.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241124145741_upstream_oauth_userinfo.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20241125110803_queue_job_recurrent.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241129091057_upstream_oauth2_link_account_name.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241202123523_upstream_oauth_responses_alg.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241210115428_oauth_refresh_token_track_next.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241210133651_oauth2_access_token_first_used.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241212154426_oauth2_response_mode_null.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20241213180524_upstream_oauth_optional_issuer.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250109105709_user_email_authentication_codes.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250113102144_user_registrations.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250114135939_allow_deviceless_compat_sessions.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250115155255_cleanup_unverified_emails.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250124151529_unsupported_threepids_table.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250129154003_compat_sessions_device_name.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250130170011_user_is_guest.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250225091000_dynamic_policy_data.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250311093145_user_deactivated_at.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20250312094013_upstream_oauth2_providers_order.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250317151803_upstream_oauth_session_unlinked_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250325102310_oauth2_clients_hash.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250404105103_compat_sso_login_browser_session.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000000_idx_compat_access_tokens_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000001_idx_compat_refresh_tokens_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000002_idx_compat_refresh_tokens_access_token_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000003_idx_compat_sessions_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000004_idx_compat_sessions_user_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000005_drop_compat_sessions_user_id_last_active_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000006_idx_compat_sso_logins_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000007_idx_oauth2_access_tokens_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000008_idx_oauth2_authorization_grants_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000009_idx_oauth2_authorization_grants_client_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000010_idx_oauth2_consents_client_fk.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20250410000011_idx_oauth2_consents_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000012_idx_oauth2_device_code_grants_client_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000013_idx_oauth2_device_code_grants_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000014_idx_oauth2_device_code_grants_user_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000015_idx_oauth2_refresh_tokens_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000016_idx_oauth2_refresh_tokens_access_token_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000017_idx_oauth2_refresh_tokens_next_refresh_token_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000018_idx_oauth2_sessions_user_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000019_idx_oauth2_sessions_client_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000020_idx_oauth2_sessions_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000021_drop_oauth2_sessions_user_id_last_active_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000022_idx_queue_jobs_started_by_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000023_idx_queue_jobs_next_attempt_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000024_idx_queue_jobs_schedule_name_fk.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20250410000025_idx_upstream_oauth_authorization_sessions_provider_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000026_idx_upstream_oauth_authorization_sessions_link_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000027_idx_upstream_oauth_links_provider_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000028_idx_upstream_oauth_links_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000029_idx_user_email_authentication_codes_authentication_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000030_idx_user_email_authentications_user_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000031_idx_user_email_authentications_user_registration_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000032_idx_user_emails_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000033_idx_user_emails_email_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000034_idx_user_passwords_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000035_idx_user_recovery_tickets_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000036_idx_user_recovery_tickets_user_email_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000037_idx_user_registrations_email_authentication_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000038_idx_user_session_authentications_user_session_fk.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20250410000039_idx_user_session_authentications_user_password_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000040_idx_user_session_authentications_upstream_oauth_session_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000041_idx_user_sessions_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000042_drop_user_sessions_user_id_last_active_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000043_idx_user_terms_user_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000044_idx_users_primary_email_fk.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410000045_idx_user_recovery_tickets_ticket_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410121612_users_lower_username_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250410174306_oauth2_authorization_default_requires_consent.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250424150930_oauth2_grants_locale.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250425113717_oauth2_session_human_name.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250506161158_upstream_oauth2_forward_login_hint.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250507131948_upstream_oauth_session_optional_nonce.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250602212100_user_registration_tokens.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250602212101_idx_user_registration_token.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250708155857_idx_user_emails_lower_email.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250709142230_id_token_claims_trigger.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250709142240_backfill_id_token_claims.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250915092000_pgtrgm_extension.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250915092635_users_username_trgm_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20250924132713_personal_access_tokens.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20251023134634_personal_access_tokens_unique_fix.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20251121145458_user_registration_upstream_oauth_session.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20251127145951_user_registration_upstream_oauth_session_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108111542_remove_apalis.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108120030_remove_user_emails_old_confirmation.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20260108121127_cleanup_oauth2_consents.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108121952_cleanup_id_token_claims_trigger.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108144040_remove_deactivated_unsupported_threepids.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108145240_drop_oauth2_consents.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260108175627_oauth_access_tokens_revoked_at_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260109115009_oauth_access_tokens_expires_at_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260109172537_oauth_refresh_token_revoked_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260109172950_oauth_refresh_token_next_token_set_null.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260109172954_oauth_refresh_token_next_token_set_null_validate.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260112094550_oauth_refresh_token_not_consumed_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260112094837_oauth_refresh_token_consumed_at_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260115111313_idx_compat_sessions_finished_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260116000002_idx_upstream_oauth_links_orphaned.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260116000003_queue_jobs_next_attempt_set_null.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260116000004_queue_jobs_next_attempt_set_null_validate.sql create mode 100644 
matrix-authentication-service/crates/storage-pg/migrations/20260121103025_upstream_oauth_track_user_session.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260121104214_upstream_auth_user_session_fk_idx.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260121112201_upstream_oauth_sessions_orphan_index.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260121121140_upstream_oauth_track_user_session_trigger.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260121121150_upstream_oauth_track_user_session_backfill.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260122113523_compat_sessions_user_session_no_action.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260122114353_compat_sessions_user_session_validate.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260122123211_idx_oauth2_sessions_finished_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260122124231_idx_user_sessions_finished_at.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260123090000_idx_oauth2_sessions_inactive_ips.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260123090001_idx_compat_sessions_inactive_ips.sql create mode 100644 matrix-authentication-service/crates/storage-pg/migrations/20260123090002_idx_user_sessions_inactive_ips.sql create mode 100644 matrix-authentication-service/crates/storage-pg/src/app_session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/compat/access_token.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/compat/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/compat/refresh_token.rs create mode 100644 
matrix-authentication-service/crates/storage-pg/src/compat/session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/compat/sso_login.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/errors.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/filter.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/iden.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/lib.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/access_token.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/authorization_grant.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/client.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/device_code_grant.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/refresh_token.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/oauth2/session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/pagination.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/personal/access_token.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/personal/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/personal/session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/policy_data.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/queue/job.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/queue/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/queue/schedule.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/queue/worker.rs create mode 100644 
matrix-authentication-service/crates/storage-pg/src/repository.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/telemetry.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/tracing.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/link.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/provider.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/email.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/mod.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/password.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/recovery.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/registration.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/registration_token.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/session.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/terms.rs create mode 100644 matrix-authentication-service/crates/storage-pg/src/user/tests.rs create mode 100644 matrix-authentication-service/crates/storage/Cargo.toml create mode 100644 matrix-authentication-service/crates/storage/src/app_session.rs create mode 100644 matrix-authentication-service/crates/storage/src/compat/access_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/compat/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/compat/refresh_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/compat/session.rs create mode 100644 
matrix-authentication-service/crates/storage/src/compat/sso_login.rs create mode 100644 matrix-authentication-service/crates/storage/src/lib.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/access_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/authorization_grant.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/client.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/device_code_grant.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/refresh_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/oauth2/session.rs create mode 100644 matrix-authentication-service/crates/storage/src/pagination.rs create mode 100644 matrix-authentication-service/crates/storage/src/personal/access_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/personal/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/personal/session.rs create mode 100644 matrix-authentication-service/crates/storage/src/policy_data.rs create mode 100644 matrix-authentication-service/crates/storage/src/queue/job.rs create mode 100644 matrix-authentication-service/crates/storage/src/queue/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/queue/schedule.rs create mode 100644 matrix-authentication-service/crates/storage/src/queue/tasks.rs create mode 100644 matrix-authentication-service/crates/storage/src/queue/worker.rs create mode 100644 matrix-authentication-service/crates/storage/src/repository.rs create mode 100644 matrix-authentication-service/crates/storage/src/upstream_oauth2/link.rs create mode 100644 matrix-authentication-service/crates/storage/src/upstream_oauth2/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/upstream_oauth2/provider.rs create mode 100644 
matrix-authentication-service/crates/storage/src/upstream_oauth2/session.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/email.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/mod.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/password.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/recovery.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/registration.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/registration_token.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/session.rs create mode 100644 matrix-authentication-service/crates/storage/src/user/terms.rs create mode 100644 matrix-authentication-service/crates/storage/src/utils.rs create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-026adeffc646b41ebc096bb874d110039b9a4a0425fd566e401f56ea215de0dd.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-07ec66733b67a9990cc9d483b564c8d05c577cf8f049d8822746c7d1dbd23752.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-09db58b250c20ab9d1701653165233e5c9aabfdae1f0ee9b77c909b2bb2f3e25.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-12112011318abc0bdd7f722ed8c5d4a86bf5758f8c32d9d41a22999b2f0698ca.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-1d1004d0fb5939fbf30c1986b80b986b1b4864a778525d0b8b0ad6678aef3e9f.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-204cf4811150a7fdeafa9373647a9cd62ac3c9e58155882858c6056e2ef6c30d.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-207b880ec2dd484ad05a7138ba485277958b66e4534561686c073e282fafaf2a.json create mode 100644 
matrix-authentication-service/crates/syn2mas/.sqlx/query-24f6ce6280dc6675ab1ebdde0c5e3db8ff7a686180d71052911879f186ed1c8e.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-486f3177dcf6117c6b966954a44d9f96a754eba64912566e81a90bd4cbd186f0.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-5b4840f42ae00c5dc9f59f2745d664b16ebd813dfa0aa32a6d39dd5c393af299.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-69aa96208513c3ea64a446c7739747fcb5e79d7e8c1212b2a679c3bde908ce93.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-78ed3bf1032cd678b42230d68fb2b8e3d74161c8b6c5fe1a746b6958ccd2fd84.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-86b2b02fbb6350100d794e4d0fa3c67bf00fd3e411f769b9f25dec27428489ed.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-979bedd942b4f71c58f3672f2917cee05ac1a628e51fe61ba6dfed253e0c63c2.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-b27828d7510d52456b50b4c4b9712878ee329ca72070d849eb61ac9c8f9d1c76.json create mode 100644 matrix-authentication-service/crates/syn2mas/.sqlx/query-ebf68b70b3e22a04b57b5587b4b099255155193dafbbd185cd8f26d93ff423a7.json create mode 100644 matrix-authentication-service/crates/syn2mas/Cargo.toml create mode 100644 matrix-authentication-service/crates/syn2mas/src/lib.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/checks.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/constraint_pausing.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/fixtures/upstream_provider.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/locking.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/mod.rs create mode 100644 
matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_access_token.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_device.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_email.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_password.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_refresh_token.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_unsupported_threepid.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_revert_temporary_tables.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_temporary_tables.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/migration.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/progress.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/checks.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/mod.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/oidc.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice.sql create mode 100644 
matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_puppet.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_refresh_token.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_unused_refresh_token.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/devices_alice.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/external_ids_alice.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/threepids_alice.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/user_alice.sql create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/mod.rs create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_refresh_tokens.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_unused_refresh_tokens.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_token.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_devices.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_external_ids.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_threepids.snap create mode 100644 matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_users.snap create mode 100644 
matrix-authentication-service/crates/syn2mas/src/telemetry.rs create mode 100644 matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250117064958_users.sql create mode 100644 matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128141011_threepids.sql create mode 100644 matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128162513_external_ids.sql create mode 100644 matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128201100_access_and_refresh_tokens.sql create mode 100644 matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250129140230_devices.sql create mode 100644 matrix-authentication-service/crates/tasks/Cargo.toml create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/misc.rs create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/mod.rs create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/oauth.rs create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/sessions.rs create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/tokens.rs create mode 100644 matrix-authentication-service/crates/tasks/src/cleanup/user.rs create mode 100644 matrix-authentication-service/crates/tasks/src/email.rs create mode 100644 matrix-authentication-service/crates/tasks/src/lib.rs create mode 100644 matrix-authentication-service/crates/tasks/src/matrix.rs create mode 100644 matrix-authentication-service/crates/tasks/src/new_queue.rs create mode 100644 matrix-authentication-service/crates/tasks/src/recovery.rs create mode 100644 matrix-authentication-service/crates/tasks/src/sessions.rs create mode 100644 matrix-authentication-service/crates/tasks/src/user.rs create mode 100644 matrix-authentication-service/crates/templates/Cargo.toml create mode 100644 matrix-authentication-service/crates/templates/src/context.rs create mode 100644 
matrix-authentication-service/crates/templates/src/context/branding.rs create mode 100644 matrix-authentication-service/crates/templates/src/context/captcha.rs create mode 100644 matrix-authentication-service/crates/templates/src/context/ext.rs create mode 100644 matrix-authentication-service/crates/templates/src/context/features.rs create mode 100644 matrix-authentication-service/crates/templates/src/forms.rs create mode 100644 matrix-authentication-service/crates/templates/src/functions.rs create mode 100644 matrix-authentication-service/crates/templates/src/lib.rs create mode 100644 matrix-authentication-service/crates/templates/src/macros.rs create mode 100644 matrix-authentication-service/crates/tower/Cargo.toml create mode 100644 matrix-authentication-service/crates/tower/src/lib.rs create mode 100644 matrix-authentication-service/crates/tower/src/metrics/duration.rs create mode 100644 matrix-authentication-service/crates/tower/src/metrics/in_flight.rs create mode 100644 matrix-authentication-service/crates/tower/src/metrics/make_attributes.rs create mode 100644 matrix-authentication-service/crates/tower/src/metrics/mod.rs create mode 100644 matrix-authentication-service/crates/tower/src/trace_context.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/enrich_span.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/future.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/layer.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/make_span.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/mod.rs create mode 100644 matrix-authentication-service/crates/tower/src/tracing/service.rs create mode 100644 matrix-authentication-service/crates/tower/src/utils.rs create mode 100644 matrix-authentication-service/deny.toml create mode 100644 matrix-authentication-service/docker-bake.hcl create mode 100644 
matrix-authentication-service/docs/README.md create mode 100644 matrix-authentication-service/docs/SUMMARY.md create mode 100644 matrix-authentication-service/docs/api/index.html create mode 100644 matrix-authentication-service/docs/api/oauth2-redirect.html create mode 100644 matrix-authentication-service/docs/api/spec.json create mode 100644 matrix-authentication-service/docs/as-login.md create mode 100644 matrix-authentication-service/docs/config.schema.json create mode 100644 matrix-authentication-service/docs/development/architecture.md create mode 100644 matrix-authentication-service/docs/development/cleanup-jobs.md create mode 100644 matrix-authentication-service/docs/development/contributing.md create mode 100644 matrix-authentication-service/docs/development/database.md create mode 100644 matrix-authentication-service/docs/development/graphql.md create mode 100644 matrix-authentication-service/docs/development/releasing.md create mode 100644 matrix-authentication-service/docs/reference/cli/README.md create mode 100644 matrix-authentication-service/docs/reference/cli/config.md create mode 100644 matrix-authentication-service/docs/reference/cli/database.md create mode 100644 matrix-authentication-service/docs/reference/cli/doctor.md create mode 100644 matrix-authentication-service/docs/reference/cli/manage.md create mode 100644 matrix-authentication-service/docs/reference/cli/server.md create mode 100644 matrix-authentication-service/docs/reference/cli/syn2mas.md create mode 100644 matrix-authentication-service/docs/reference/cli/templates.md create mode 100644 matrix-authentication-service/docs/reference/cli/worker.md create mode 100644 matrix-authentication-service/docs/reference/configuration.md create mode 100644 matrix-authentication-service/docs/reference/scopes.md create mode 100644 matrix-authentication-service/docs/rustdoc/mas_handlers/README.md create mode 100644 matrix-authentication-service/docs/setup/README.md create mode 100644 
matrix-authentication-service/docs/setup/database.md create mode 100644 matrix-authentication-service/docs/setup/general.md create mode 100644 matrix-authentication-service/docs/setup/homeserver.md create mode 100644 matrix-authentication-service/docs/setup/installation.md create mode 100644 matrix-authentication-service/docs/setup/migration.md create mode 100644 matrix-authentication-service/docs/setup/reverse-proxy.md create mode 100644 matrix-authentication-service/docs/setup/running.md create mode 100644 matrix-authentication-service/docs/setup/sso.md create mode 100644 matrix-authentication-service/docs/storybook/README.md create mode 100644 matrix-authentication-service/docs/topics/access-token.md create mode 100644 matrix-authentication-service/docs/topics/admin-api.md create mode 100644 matrix-authentication-service/docs/topics/authorization.md create mode 100644 matrix-authentication-service/docs/topics/policy.md create mode 100644 matrix-authentication-service/frontend/.browserlistrc create mode 100644 matrix-authentication-service/frontend/.gitignore create mode 100644 matrix-authentication-service/frontend/.npmrc create mode 100644 matrix-authentication-service/frontend/.postcssrc.json create mode 100644 matrix-authentication-service/frontend/.storybook/locales.ts create mode 100644 matrix-authentication-service/frontend/.storybook/main.ts create mode 100644 matrix-authentication-service/frontend/.storybook/preview-head.html create mode 100644 matrix-authentication-service/frontend/.storybook/preview.tsx create mode 100644 matrix-authentication-service/frontend/.storybook/public/mockServiceWorker.js create mode 100644 matrix-authentication-service/frontend/codegen.ts create mode 100644 matrix-authentication-service/frontend/graphql.config.json create mode 100644 matrix-authentication-service/frontend/i18next.config.ts create mode 100644 matrix-authentication-service/frontend/index.html create mode 100644 
matrix-authentication-service/frontend/knip.config.ts create mode 100644 matrix-authentication-service/frontend/locales/cs.json create mode 100644 matrix-authentication-service/frontend/locales/da.json create mode 100644 matrix-authentication-service/frontend/locales/de.json create mode 100644 matrix-authentication-service/frontend/locales/en.json create mode 100644 matrix-authentication-service/frontend/locales/et.json create mode 100644 matrix-authentication-service/frontend/locales/fi.json create mode 100644 matrix-authentication-service/frontend/locales/fr.json create mode 100644 matrix-authentication-service/frontend/locales/hu.json create mode 100644 matrix-authentication-service/frontend/locales/nb-NO.json create mode 100644 matrix-authentication-service/frontend/locales/nl.json create mode 100644 matrix-authentication-service/frontend/locales/pl.json create mode 100644 matrix-authentication-service/frontend/locales/pt.json create mode 100644 matrix-authentication-service/frontend/locales/ru.json create mode 100644 matrix-authentication-service/frontend/locales/sv.json create mode 100644 matrix-authentication-service/frontend/locales/uk.json create mode 100644 matrix-authentication-service/frontend/locales/zh-Hans.json create mode 100644 matrix-authentication-service/frontend/package-lock.json create mode 100644 matrix-authentication-service/frontend/package.json create mode 100644 matrix-authentication-service/frontend/schema.graphql create mode 100644 matrix-authentication-service/frontend/src/@types/i18next.d.ts create mode 100644 matrix-authentication-service/frontend/src/components/AccountDeleteButton.tsx create mode 100644 matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.module.css create mode 100644 matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.tsx create mode 100644 
matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/BrowserSession.tsx create mode 100644 matrix-authentication-service/frontend/src/components/ButtonLink.module.css create mode 100644 matrix-authentication-service/frontend/src/components/ButtonLink.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Client/__snapshots__/OAuth2ClientDetail.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Collapsible/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/CompatSession.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/CompatSession.tsx create mode 100644 matrix-authentication-service/frontend/src/components/DateTime.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/DateTime.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Dialog/Dialog.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Dialog/Dialog.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Dialog/Dialog.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Dialog/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.module.css create mode 100644 
matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.tsx create mode 100644 matrix-authentication-service/frontend/src/components/EmptyState/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/ErrorBoundary.tsx create mode 100644 matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.module.css create mode 100644 matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Filter/Filter.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Filter/Filter.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Filter/Filter.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Filter/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/Footer/Footer.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Footer/Footer.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Footer/Footer.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Footer/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/GenericError.module.css create mode 100644 matrix-authentication-service/frontend/src/components/GenericError.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Layout/Layout.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Layout/Layout.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Layout/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/Link.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/LoadingScreen.module.css 
create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/LoadingScreen.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/LoadingScreen.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/LoadingScreen.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/__snapshots__/LoadingScreen.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/LoadingScreen/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/LoadingSpinner/LoadingSpinner.module.css create mode 100644 matrix-authentication-service/frontend/src/components/LoadingSpinner/LoadingSpinner.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingSpinner/LoadingSpinner.tsx create mode 100644 matrix-authentication-service/frontend/src/components/LoadingSpinner/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/NavBar/NavBar.module.css create mode 100644 matrix-authentication-service/frontend/src/components/NavBar/NavBar.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/NavBar/NavBar.tsx create mode 100644 matrix-authentication-service/frontend/src/components/NavBar/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/NavItem/NavItem.module.css create mode 100644 matrix-authentication-service/frontend/src/components/NavItem/NavItem.tsx create mode 100644 matrix-authentication-service/frontend/src/components/NavItem/__snapshots__/NavItem.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/NavItem/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/NotFound.tsx create mode 100644 matrix-authentication-service/frontend/src/components/OAuth2Session.test.tsx create mode 100644 
matrix-authentication-service/frontend/src/components/OAuth2Session.tsx create mode 100644 matrix-authentication-service/frontend/src/components/PageHeading/PageHeading.module.css create mode 100644 matrix-authentication-service/frontend/src/components/PageHeading/PageHeading.tsx create mode 100644 matrix-authentication-service/frontend/src/components/PageHeading/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/PaginationControls.tsx create mode 100644 matrix-authentication-service/frontend/src/components/PasswordConfirmation.tsx create mode 100644 matrix-authentication-service/frontend/src/components/PasswordCreationDoubleInput.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Separator/Separator.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Separator/Separator.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Separator/index.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/ClientAvatar.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Session/ClientAvatar.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/ClientAvatar.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/DeviceTypeIcon.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Session/DeviceTypeIcon.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/DeviceTypeIcon.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/DeviceTypeIcon.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/EndBrowserSessionButton.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/EndCompatSessionButton.tsx create mode 100644 
matrix-authentication-service/frontend/src/components/Session/EndOAuth2SessionButton.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/EndSessionButton.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/LastActive.module.css create mode 100644 matrix-authentication-service/frontend/src/components/Session/LastActive.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/LastActive.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/LastActive.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Session/__snapshots__/ClientAvatar.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/Session/__snapshots__/DeviceTypeIcon.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/Session/__snapshots__/LastActive.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/Session/__snapshots__/Session.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/SessionCard/SessionCard.module.css create mode 100644 matrix-authentication-service/frontend/src/components/SessionCard/SessionCard.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionCard/SessionCard.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionCard/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/BrowserSessionDetail.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/CompatSessionDetail.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/CompatSessionDetail.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/EditSessionName.tsx create mode 100644 
matrix-authentication-service/frontend/src/components/SessionDetail/OAuth2SessionDetail.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/OAuth2SessionDetail.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/SessionHeader.module.css create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/SessionHeader.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/SessionHeader.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/SessionHeader.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/SessionInfo.tsx create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/__snapshots__/CompatSessionDetail.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/__snapshots__/OAuth2SessionDetail.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/SessionDetail/__snapshots__/SessionHeader.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/Typography.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/Typography.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserEmail/UserEmail.module.css create mode 100644 matrix-authentication-service/frontend/src/components/UserEmail/UserEmail.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserEmail/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/UserGreeting/UserGreeting.module.css create mode 100644 matrix-authentication-service/frontend/src/components/UserGreeting/UserGreeting.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserGreeting/UserGreeting.tsx create mode 100644 
matrix-authentication-service/frontend/src/components/UserGreeting/index.ts create mode 100644 matrix-authentication-service/frontend/src/components/UserProfile/AddEmailForm.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserProfile/UserEmailList.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/BrowserSessionsOverview.module.css create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/BrowserSessionsOverview.stories.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/BrowserSessionsOverview.test.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/BrowserSessionsOverview.tsx create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/__snapshots__/BrowserSessionsOverview.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/UserSessionsOverview/__snapshots__/UserSessionsOverview.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/__snapshots__/CompatSession.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/__snapshots__/LoadingScreen.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/components/__snapshots__/OAuth2Session.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/src/config.ts create mode 100644 matrix-authentication-service/frontend/src/entrypoints/main.tsx create mode 100644 matrix-authentication-service/frontend/src/entrypoints/shared.css create mode 100644 matrix-authentication-service/frontend/src/entrypoints/swagger.ts create mode 100644 matrix-authentication-service/frontend/src/entrypoints/templates.css create mode 100644 matrix-authentication-service/frontend/src/entrypoints/templates.ts create mode 100644 
matrix-authentication-service/frontend/src/gql/fragment-masking.ts create mode 100644 matrix-authentication-service/frontend/src/gql/gql.ts create mode 100644 matrix-authentication-service/frontend/src/gql/graphql.ts create mode 100644 matrix-authentication-service/frontend/src/gql/index.ts create mode 100644 matrix-authentication-service/frontend/src/graphql.ts create mode 100644 matrix-authentication-service/frontend/src/i18n.ts create mode 100644 matrix-authentication-service/frontend/src/i18n/password_changes.ts create mode 100644 matrix-authentication-service/frontend/src/pagination.ts create mode 100644 matrix-authentication-service/frontend/src/routeTree.gen.ts create mode 100644 matrix-authentication-service/frontend/src/router.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/__root.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/_account.index.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/_account.plan.index.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/_account.sessions.browsers.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/_account.sessions.index.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/_account.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/clients.$id.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/devices.$.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/emails.$id.in-use.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/emails.$id.verify.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/password.change.index.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/password.change.success.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/password.recovery.index.tsx create mode 100644 
matrix-authentication-service/frontend/src/routes/reset-cross-signing.cancelled.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/reset-cross-signing.index.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/reset-cross-signing.success.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/reset-cross-signing.tsx create mode 100644 matrix-authentication-service/frontend/src/routes/sessions.$id.tsx create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-button.css create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-checkbox-control.css create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-form.css create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-link.css create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-mfa-control.css create mode 100644 matrix-authentication-service/frontend/src/styles/cpd-text-control.css create mode 100644 matrix-authentication-service/frontend/src/test-utils/mockLocale.ts create mode 100644 matrix-authentication-service/frontend/src/test-utils/render.tsx create mode 100644 matrix-authentication-service/frontend/src/test-utils/router.tsx create mode 100644 matrix-authentication-service/frontend/src/utils/dates.ts create mode 100644 matrix-authentication-service/frontend/src/utils/deviceIdFromScope.test.ts create mode 100644 matrix-authentication-service/frontend/src/utils/deviceIdFromScope.ts create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/enwiki.json create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/index.ts create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/namesf.json create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/namesm.json create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/namess.json 
create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/passwords.json create mode 100644 matrix-authentication-service/frontend/src/utils/password_complexity/ustvfilm.json create mode 100644 matrix-authentication-service/frontend/src/utils/simplifyUrl.ts create mode 100644 matrix-authentication-service/frontend/src/vite-env.d.ts create mode 100644 matrix-authentication-service/frontend/stories/routes/app.tsx create mode 100644 matrix-authentication-service/frontend/stories/routes/index.stories.tsx create mode 100644 matrix-authentication-service/frontend/stories/routes/reset-cross-signing.stories.tsx create mode 100644 matrix-authentication-service/frontend/tailwind.config.cjs create mode 100644 matrix-authentication-service/frontend/tests/mocks/handlers.ts create mode 100644 matrix-authentication-service/frontend/tests/routes/__snapshots__/reset-cross-signing.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/tests/routes/account/__snapshots__/index.test.tsx.snap create mode 100644 matrix-authentication-service/frontend/tests/routes/account/index.test.tsx create mode 100644 matrix-authentication-service/frontend/tests/routes/render.tsx create mode 100644 matrix-authentication-service/frontend/tests/routes/reset-cross-signing.test.tsx create mode 100644 matrix-authentication-service/frontend/tests/routes/types.d.ts create mode 100644 matrix-authentication-service/frontend/tsconfig.json create mode 100644 matrix-authentication-service/frontend/tsconfig.node.json create mode 100644 matrix-authentication-service/frontend/vite.config.ts create mode 100644 matrix-authentication-service/frontend/vitest.global-setup.ts create mode 100644 matrix-authentication-service/frontend/vitest.setup.ts create mode 100644 matrix-authentication-service/localazy.json create mode 100644 matrix-authentication-service/misc/build-docs.sh create mode 100755 matrix-authentication-service/misc/device-code-grant.sh create mode 100755 
matrix-authentication-service/misc/sqlx_update.sh create mode 100755 matrix-authentication-service/misc/update.sh create mode 100644 matrix-authentication-service/policies/.gitignore create mode 100644 matrix-authentication-service/policies/.regal/config.yaml create mode 100644 matrix-authentication-service/policies/Makefile create mode 100644 matrix-authentication-service/policies/authorization_grant/authorization_grant.rego create mode 100644 matrix-authentication-service/policies/authorization_grant/authorization_grant_test.rego create mode 100644 matrix-authentication-service/policies/client_registration/client_registration.rego create mode 100644 matrix-authentication-service/policies/client_registration/client_registration_test.rego create mode 100644 matrix-authentication-service/policies/common/common.rego create mode 100644 matrix-authentication-service/policies/common/common_test.rego create mode 100644 matrix-authentication-service/policies/compat_login/compat_login.rego create mode 100644 matrix-authentication-service/policies/compat_login/compat_login_test.rego create mode 100644 matrix-authentication-service/policies/email/email.rego create mode 100644 matrix-authentication-service/policies/email/email_test.rego create mode 100644 matrix-authentication-service/policies/register/register.rego create mode 100644 matrix-authentication-service/policies/register/register_test.rego create mode 100644 matrix-authentication-service/policies/schema/authorization_grant_input.json create mode 100644 matrix-authentication-service/policies/schema/client_registration_input.json create mode 100644 matrix-authentication-service/policies/schema/compat_login_input.json create mode 100644 matrix-authentication-service/policies/schema/email_input.json create mode 100644 matrix-authentication-service/policies/schema/register_input.json create mode 100644 matrix-authentication-service/policies/util/coveralls.rego create mode 100644 
matrix-authentication-service/templates/app.html create mode 100644 matrix-authentication-service/templates/base.html create mode 100644 matrix-authentication-service/templates/components/back_to_client.html create mode 100644 matrix-authentication-service/templates/components/button.html create mode 100644 matrix-authentication-service/templates/components/captcha.html create mode 100644 matrix-authentication-service/templates/components/errors.html create mode 100644 matrix-authentication-service/templates/components/field.html create mode 100644 matrix-authentication-service/templates/components/footer.html create mode 100644 matrix-authentication-service/templates/components/icon.html create mode 100644 matrix-authentication-service/templates/components/idp_brand.html create mode 100644 matrix-authentication-service/templates/components/logout.html create mode 100644 matrix-authentication-service/templates/components/scope.html create mode 100644 matrix-authentication-service/templates/device_name.txt create mode 100644 matrix-authentication-service/templates/emails/recovery.html create mode 100644 matrix-authentication-service/templates/emails/recovery.subject create mode 100644 matrix-authentication-service/templates/emails/recovery.txt create mode 100644 matrix-authentication-service/templates/emails/verification.html create mode 100644 matrix-authentication-service/templates/emails/verification.subject create mode 100644 matrix-authentication-service/templates/emails/verification.txt create mode 100644 matrix-authentication-service/templates/form_post.html create mode 100644 matrix-authentication-service/templates/pages/404.html create mode 100644 matrix-authentication-service/templates/pages/account/deactivated.html create mode 100644 matrix-authentication-service/templates/pages/account/locked.html create mode 100644 matrix-authentication-service/templates/pages/account/logged_out.html create mode 100644 
matrix-authentication-service/templates/pages/compat_login_policy_violation.html create mode 100644 matrix-authentication-service/templates/pages/consent.html create mode 100644 matrix-authentication-service/templates/pages/device_consent.html create mode 100644 matrix-authentication-service/templates/pages/device_link.html create mode 100644 matrix-authentication-service/templates/pages/error.html create mode 100644 matrix-authentication-service/templates/pages/index.html create mode 100644 matrix-authentication-service/templates/pages/login.html create mode 100644 matrix-authentication-service/templates/pages/policy_violation.html create mode 100644 matrix-authentication-service/templates/pages/reauth.html create mode 100644 matrix-authentication-service/templates/pages/recovery/consumed.html create mode 100644 matrix-authentication-service/templates/pages/recovery/disabled.html create mode 100644 matrix-authentication-service/templates/pages/recovery/expired.html create mode 100644 matrix-authentication-service/templates/pages/recovery/finish.html create mode 100644 matrix-authentication-service/templates/pages/recovery/progress.html create mode 100644 matrix-authentication-service/templates/pages/recovery/start.html create mode 100644 matrix-authentication-service/templates/pages/register/index.html create mode 100644 matrix-authentication-service/templates/pages/register/password.html create mode 100644 matrix-authentication-service/templates/pages/register/steps/display_name.html create mode 100644 matrix-authentication-service/templates/pages/register/steps/email_in_use.html create mode 100644 matrix-authentication-service/templates/pages/register/steps/registration_token.html create mode 100644 matrix-authentication-service/templates/pages/register/steps/verify_email.html create mode 100644 matrix-authentication-service/templates/pages/sso.html create mode 100644 matrix-authentication-service/templates/pages/upstream_oauth2/do_register.html create mode 
100644 matrix-authentication-service/templates/pages/upstream_oauth2/link_mismatch.html create mode 100644 matrix-authentication-service/templates/pages/upstream_oauth2/suggest_link.html create mode 100644 matrix-authentication-service/templates/swagger/doc.html create mode 100644 matrix-authentication-service/templates/swagger/oauth2-redirect.html create mode 100644 matrix-authentication-service/translations/cs.json create mode 100644 matrix-authentication-service/translations/da.json create mode 100644 matrix-authentication-service/translations/de.json create mode 100644 matrix-authentication-service/translations/en.json create mode 100644 matrix-authentication-service/translations/et.json create mode 100644 matrix-authentication-service/translations/fi.json create mode 100644 matrix-authentication-service/translations/fr.json create mode 100644 matrix-authentication-service/translations/hu.json create mode 100644 matrix-authentication-service/translations/nb-NO.json create mode 100644 matrix-authentication-service/translations/nl.json create mode 100644 matrix-authentication-service/translations/pl.json create mode 100644 matrix-authentication-service/translations/pt.json create mode 100644 matrix-authentication-service/translations/ru.json create mode 100644 matrix-authentication-service/translations/sv.json create mode 100644 matrix-authentication-service/translations/uk.json create mode 100644 matrix-authentication-service/translations/zh-Hans.json diff --git a/matrix-authentication-service/.cargo/config.toml b/matrix-authentication-service/.cargo/config.toml new file mode 100644 index 00000000..18f6fb28 --- /dev/null +++ b/matrix-authentication-service/.cargo/config.toml @@ -0,0 +1,12 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[build] +rustflags = ["--cfg", "tokio_unstable"] + +# On x86_64, we target the x86-64-v2 psABI, as it is a good compromise between +# modern CPU instructions and compatibility. +[target.x86_64-unknown-linux-gnu] +rustflags = ["--cfg", "tokio_unstable", "-C", "target-cpu=x86-64-v2"] diff --git a/matrix-authentication-service/.codecov.yml b/matrix-authentication-service/.codecov.yml new file mode 100644 index 00000000..a946b3ce --- /dev/null +++ b/matrix-authentication-service/.codecov.yml @@ -0,0 +1,10 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +comment: false + +flag_management: + default_rules: + carryforward: true diff --git a/matrix-authentication-service/.config/nextest.toml b/matrix-authentication-service/.config/nextest.toml new file mode 100644 index 00000000..7ed06faa --- /dev/null +++ b/matrix-authentication-service/.config/nextest.toml @@ -0,0 +1,7 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[profile.default] +retries = 1 diff --git a/matrix-authentication-service/.dockerignore b/matrix-authentication-service/.dockerignore new file mode 100644 index 00000000..01fcc861 --- /dev/null +++ b/matrix-authentication-service/.dockerignore @@ -0,0 +1,17 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +target/ +crates/*/target +crates/*/node_modules +frontend/node_modules +frontend/dist +docs/ +.devcontainer/ +.github/ +.gitignore +Dockerfile +.dockerignore +docker-bake.hcl diff --git a/matrix-authentication-service/.editorconfig b/matrix-authentication-service/.editorconfig new file mode 100644 index 00000000..ccbe6ed4 --- /dev/null +++ b/matrix-authentication-service/.editorconfig @@ -0,0 +1,16 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +root = true + +[*] +charset=utf-8 +end_of_line = lf + +[*.{ts,tsx,cts,mts,js,cjs,mjs,css,json,graphql}] +indent_style = space +indent_size = 2 +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/matrix-authentication-service/.github/CODEOWNERS b/matrix-authentication-service/.github/CODEOWNERS new file mode 100644 index 00000000..22269718 --- /dev/null +++ b/matrix-authentication-service/.github/CODEOWNERS @@ -0,0 +1 @@ +* @element-hq/mas-maintainers diff --git a/matrix-authentication-service/.github/actions/build-frontend/action.yml b/matrix-authentication-service/.github/actions/build-frontend/action.yml new file mode 100644 index 00000000..08e2cf6f --- /dev/null +++ b/matrix-authentication-service/.github/actions/build-frontend/action.yml @@ -0,0 +1,25 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Build the frontend assets +description: Installs Node.js and builds the frontend assets from the frontend directory + +runs: + using: composite + steps: + - name: Install Node + uses: actions/setup-node@v6.0.0 + with: + node-version: "24" + + - name: Install dependencies + run: npm ci + working-directory: ./frontend + shell: sh + + - name: Build the frontend assets + run: npm run build + working-directory: ./frontend + shell: sh diff --git a/matrix-authentication-service/.github/actions/build-policies/action.yml b/matrix-authentication-service/.github/actions/build-policies/action.yml new file mode 100644 index 00000000..6264047e --- /dev/null +++ b/matrix-authentication-service/.github/actions/build-policies/action.yml @@ -0,0 +1,21 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +name: Build the Open Policy Agent policies +description: Installs OPA and builds the policies + +runs: + using: composite + steps: + - name: Install Open Policy Agent + uses: open-policy-agent/setup-opa@v2.2.0 + with: + # Keep in sync with the Dockerfile and policies/Makefile + version: 1.13.1 + + - name: Build the policies + run: make + working-directory: ./policies + shell: sh diff --git a/matrix-authentication-service/.github/dependabot.yml b/matrix-authentication-service/.github/dependabot.yml new file mode 100644 index 00000000..8b67a141 --- /dev/null +++ b/matrix-authentication-service/.github/dependabot.yml @@ -0,0 +1,113 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +version: 2 +updates: + - package-ecosystem: "cargo" + directory: "/" + labels: + - "A-Dependencies" + - "Z-Deps-Backend" + schedule: + interval: "daily" + ignore: + # We plan to remove apalis soon, let's ignore it for now + - dependency-name: "apalis" + - dependency-name: "apalis-*" + groups: + axum: + patterns: + - "axum" + - "axum-*" + opentelemetry: + patterns: + - "opentelemetry" + - "opentelemetry_sdk" + - "opentelemetry-*" + - "tracing-opentelemetry" + sea-query: + patterns: + - "sea-query" + - "sea-query-*" + sentry: + patterns: + - "sentry" + - "sentry-*" + tracing: + patterns: + - "tracing-*" + exclude-patterns: + - "tracing-opentelemetry" + icu: + patterns: + - "icu" + - "icu_*" + + - package-ecosystem: "github-actions" + directory: "/" + labels: + - "A-Dependencies" + - "Z-Deps-CI" + schedule: + interval: "daily" + + - package-ecosystem: "npm" + directory: "/frontend/" + labels: + - "A-Dependencies" + - "Z-Deps-Frontend" + schedule: + interval: "daily" + groups: + storybook: + patterns: + - "storybook" + - "storybook-*" + - "@storybook/*" + fontsource: + patterns: + - "@fontsource/*" + vitest: + patterns: + - "vitest" + - "@vitest/*" + vite: + patterns: + - "vite" + - "@vitejs/*" + - "vite-*" + i18next: + patterns: + - "i18next" + - "i18next-*" + - "react-i18next" + react: + patterns: + - "react" + - "react-*" + exclude-patterns: + - "react-i18next" + jotai: + patterns: + - "jotai" + - "jotai-*" + graphql-codegen: + patterns: + - "@graphql-codegen/*" + tanstack-router: + patterns: + - "@tanstack/react-router" + - "@tanstack/react-router-*" + - "@tanstack/router-*" + tanstack-query: + patterns: + - "@tanstack/react-query" + - "@tanstack/react-query-*" + types: + patterns: + - "@types/*" + browser-logos: + patterns: + - "@browser-logos/*" diff --git a/matrix-authentication-service/.github/release.yml b/matrix-authentication-service/.github/release.yml new file mode 100644 index 00000000..3633ae68 --- /dev/null +++ 
b/matrix-authentication-service/.github/release.yml @@ -0,0 +1,45 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +changelog: + categories: + - title: Bug Fixes + labels: + - T-Defect + + - title: New Features + labels: + - T-Enhancement + exclude: + labels: + - A-Admin-API + - A-Documentation + + - title: Changes to the admin API + labels: + - A-Admin-API + + - title: Documentation + labels: + - A-Documentation + + - title: Translations + labels: + - A-I18n + + - title: Internal Changes + labels: + - T-Task + + - title: Other Changes + labels: + - "*" + exclude: + labels: + - A-Dependencies + + - title: Dependency Updates + labels: + - A-Dependencies diff --git a/matrix-authentication-service/.github/scripts/.gitignore b/matrix-authentication-service/.github/scripts/.gitignore new file mode 100644 index 00000000..efa4841d --- /dev/null +++ b/matrix-authentication-service/.github/scripts/.gitignore @@ -0,0 +1,7 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +node_modules/ +package-lock.json diff --git a/matrix-authentication-service/.github/scripts/cleanup-pr.cjs b/matrix-authentication-service/.github/scripts/cleanup-pr.cjs new file mode 100644 index 00000000..43db8572 --- /dev/null +++ b/matrix-authentication-service/.github/scripts/cleanup-pr.cjs @@ -0,0 +1,44 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const metadataJson = process.env.BUILD_IMAGE_MANIFEST; + if (!metadataJson) throw new Error("BUILD_IMAGE_MANIFEST is not defined"); + /** @type {Record} */ + const metadata = JSON.parse(metadataJson); + + await github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: "Z-Build-Workflow", + }); + + const tagListMarkdown = metadata.regular.tags + .map((tag) => `- \`${tag}\``) + .join("\n"); + + // Get the workflow run + const run = await github.rest.actions.getWorkflowRun({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.runId, + }); + + await github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `A build for this PR at commit ${context.sha} has been created through the Z-Build-Workflow label by ${context.actor}. + +Docker image is available at: +${tagListMarkdown} + +Pre-built binaries are available through the [workflow run artifacts](${run.data.html_url}).`, + }); +}; diff --git a/matrix-authentication-service/.github/scripts/commit-and-tag.cjs b/matrix-authentication-service/.github/scripts/commit-and-tag.cjs new file mode 100644 index 00000000..086e1b83 --- /dev/null +++ b/matrix-authentication-service/.github/scripts/commit-and-tag.cjs @@ -0,0 +1,66 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const fs = require("node:fs/promises"); + const { owner, repo } = context.repo; + const version = process.env.VERSION; + const parent = context.sha; + if (!version) throw new Error("VERSION is not defined"); + + const files = ["Cargo.toml", "Cargo.lock"]; + + /** @type {{path: string, mode: "100644", type: "blob", sha: string}[]} */ + const tree = []; + for (const file of files) { + const content = await fs.readFile(file); + const blob = await github.rest.git.createBlob({ + owner, + repo, + content: content.toString("base64"), + encoding: "base64", + }); + console.log(`Created blob for ${file}:`, blob.data.url); + + tree.push({ + path: file, + mode: "100644", + type: "blob", + sha: blob.data.sha, + }); + } + + const treeObject = await github.rest.git.createTree({ + owner, + repo, + tree, + base_tree: parent, + }); + console.log("Created tree:", treeObject.data.url); + + const commit = await github.rest.git.createCommit({ + owner, + repo, + message: version, + parents: [parent], + tree: treeObject.data.sha, + }); + console.log("Created commit:", commit.data.url); + + const tag = await github.rest.git.createTag({ + owner, + repo, + tag: `v${version}`, + message: version, + type: "commit", + object: commit.data.sha, + }); + console.log("Created tag:", tag.data.url); + + return { commit: commit.data.sha, tag: tag.data.sha }; +}; diff --git a/matrix-authentication-service/.github/scripts/create-release-branch.cjs b/matrix-authentication-service/.github/scripts/create-release-branch.cjs new file mode 100644 index 00000000..2508765f --- /dev/null +++ b/matrix-authentication-service/.github/scripts/create-release-branch.cjs @@ -0,0 +1,22 @@ +// Copyright 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const { owner, repo } = context.repo; + const branch = process.env.BRANCH; + const sha = process.env.SHA; + if (!sha) throw new Error("SHA is not defined"); + + await github.rest.git.createRef({ + owner, + repo, + ref: `refs/heads/${branch}`, + sha, + }); + console.log(`Created branch ${branch} from ${sha}`); +}; diff --git a/matrix-authentication-service/.github/scripts/create-version-tag.cjs b/matrix-authentication-service/.github/scripts/create-version-tag.cjs new file mode 100644 index 00000000..47e00ecb --- /dev/null +++ b/matrix-authentication-service/.github/scripts/create-version-tag.cjs @@ -0,0 +1,24 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const { owner, repo } = context.repo; + const version = process.env.VERSION; + const tagSha = process.env.TAG_SHA; + + if (!version) throw new Error("VERSION is not defined"); + if (!tagSha) throw new Error("TAG_SHA is not defined"); + + const tag = await github.rest.git.createRef({ + owner, + repo, + ref: `refs/tags/v${version}`, + sha: tagSha, + }); + console.log("Created tag ref:", tag.data.url); +}; diff --git a/matrix-authentication-service/.github/scripts/merge-back.cjs b/matrix-authentication-service/.github/scripts/merge-back.cjs new file mode 100644 index 00000000..d3948398 --- /dev/null +++ b/matrix-authentication-service/.github/scripts/merge-back.cjs @@ -0,0 +1,60 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const { owner, repo } = context.repo; + const sha = process.env.SHA; + const branch = `ref-merge/${sha}`; + if (!sha) throw new Error("SHA is not defined"); + + await github.rest.git.createRef({ + owner, + repo, + ref: `refs/heads/${branch}`, + sha, + }); + console.log(`Created branch ${branch} to ${sha}`); + + // Create a PR to merge the branch back to main + const pr = await github.rest.pulls.create({ + owner, + repo, + head: branch, + base: "main", + title: "Automatic merge back to main", + body: "This pull request was automatically created by the release workflow. 
It merges the release branch back to main.", + maintainer_can_modify: true, + }); + console.log( + `Created pull request #${pr.data.number} to merge the release branch back to main`, + ); + console.log(`PR URL: ${pr.data.html_url}`); + + // Add the `T-Task` label to the PR + await github.rest.issues.addLabels({ + owner, + repo, + issue_number: pr.data.number, + labels: ["T-Task"], + }); + + // Enable auto-merge on the PR + await github.graphql( + ` + mutation AutoMerge($id: ID!) { + enablePullRequestAutoMerge(input: { + pullRequestId: $id, + mergeMethod: MERGE, + }) { + clientMutationId + } + } + `, + { id: pr.data.node_id }, + ); +}; diff --git a/matrix-authentication-service/.github/scripts/package.json b/matrix-authentication-service/.github/scripts/package.json new file mode 100644 index 00000000..8fc6ec2c --- /dev/null +++ b/matrix-authentication-service/.github/scripts/package.json @@ -0,0 +1,7 @@ +{ + "private": true, + "devDependencies": { + "@actions/github-script": "github:actions/github-script", + "typescript": "^5.7.3" + } +} diff --git a/matrix-authentication-service/.github/scripts/update-release-branch.cjs b/matrix-authentication-service/.github/scripts/update-release-branch.cjs new file mode 100644 index 00000000..78dbb468 --- /dev/null +++ b/matrix-authentication-service/.github/scripts/update-release-branch.cjs @@ -0,0 +1,22 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const { owner, repo } = context.repo; + const branch = process.env.BRANCH; + const sha = process.env.SHA; + if (!sha) throw new Error("SHA is not defined"); + + await github.rest.git.updateRef({ + owner, + repo, + ref: `heads/${branch}`, + sha, + }); + console.log(`Updated branch ${branch} to ${sha}`); +}; diff --git a/matrix-authentication-service/.github/scripts/update-unstable-tag.cjs b/matrix-authentication-service/.github/scripts/update-unstable-tag.cjs new file mode 100644 index 00000000..765e85d6 --- /dev/null +++ b/matrix-authentication-service/.github/scripts/update-unstable-tag.cjs @@ -0,0 +1,21 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// @ts-check + +/** @param {import('@actions/github-script').AsyncFunctionArguments} AsyncFunctionArguments */ +module.exports = async ({ github, context }) => { + const { owner, repo } = context.repo; + const sha = context.sha; + + const tag = await github.rest.git.updateRef({ + owner, + repo, + force: true, + ref: "tags/unstable", + sha, + }); + console.log("Updated tag ref:", tag.data.url); +}; diff --git a/matrix-authentication-service/.github/workflows/build.yaml b/matrix-authentication-service/.github/workflows/build.yaml new file mode 100644 index 00000000..0eb691e9 --- /dev/null +++ b/matrix-authentication-service/.github/workflows/build.yaml @@ -0,0 +1,469 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Build + +on: + push: + branches: + - main + - "release/**" + tags: + - "v*" + + # Run when there is a label change on the pull request + # This runs only if the 'Z-Build-Workflow' is added to the pull request + pull_request: + types: [labeled] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + IMAGE: ghcr.io/element-hq/matrix-authentication-service + BUILDCACHE: ghcr.io/element-hq/matrix-authentication-service/buildcache + DOCKER_METADATA_ANNOTATIONS_LEVELS: manifest,index + +jobs: + compute-version: + name: Compute version using git describe + if: github.event_name == 'push' || github.event.label.name == 'Z-Build-Workflow' + runs-on: ubuntu-24.04 + + permissions: + contents: read + + outputs: + describe: ${{ steps.git.outputs.describe }} + timestamp: ${{ steps.git.outputs.timestamp }} + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + # Need a full clone so that `git describe` reports the right version + fetch-depth: 0 + + - name: Compute version and timestamp out of git history + id: git + run: | + echo "describe=$(git describe --tags --match 'v*.*.*' --always)" >> $GITHUB_OUTPUT + echo "timestamp=$(git log -1 --format=%ct)" >> $GITHUB_OUTPUT + + build-assets: + name: Build assets + if: github.event_name == 'push' || github.event.label.name == 'Z-Build-Workflow' + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - uses: ./.github/actions/build-frontend + - uses: ./.github/actions/build-policies + + - name: Prepare assets artifact + run: | + mkdir -p assets-dist/share + cp policies/policy.wasm assets-dist/share/policy.wasm + cp frontend/dist/manifest.json assets-dist/share/manifest.json + cp -r frontend/dist/ assets-dist/share/assets + cp -r templates/ assets-dist/share/templates + 
cp -r translations/ assets-dist/share/translations + cp LICENSE assets-dist/LICENSE + chmod -R u=rwX,go=rX assets-dist/ + + - name: Upload assets + uses: actions/upload-artifact@v6.0.0 + with: + name: assets + path: assets-dist + + build-binaries: + name: Build binaries + if: github.event_name == 'push' || github.event.label.name == 'Z-Build-Workflow' + runs-on: ubuntu-24.04 + + needs: + - compute-version + + strategy: + matrix: + include: + - target: x86_64-unknown-linux-gnu + - target: aarch64-unknown-linux-gnu + + env: + VERGEN_GIT_DESCRIBE: ${{ needs.compute-version.outputs.describe }} + SOURCE_DATE_EPOCH: ${{ needs.compute-version.outputs.timestamp }} + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + targets: | + ${{ matrix.target }} + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Install zig + uses: goto-bus-stop/setup-zig@v2 + with: + version: 0.13.0 + + - name: Install cargo-zigbuild + uses: taiki-e/install-action@v2 + with: + tool: cargo-zigbuild + + - name: Build the binary + run: | + cargo zigbuild \ + --release \ + --target ${{ matrix.target }}.2.17 \ + --no-default-features \ + --features dist \ + -p mas-cli + + - name: Upload binary artifact + uses: actions/upload-artifact@v6.0.0 + with: + name: binary-${{ matrix.target }} + path: target/${{ matrix.target }}/release/mas-cli + + assemble-archives: + name: Assemble release archives + if: github.event_name == 'push' || github.event.label.name == 'Z-Build-Workflow' + runs-on: ubuntu-24.04 + + needs: + - build-assets + - build-binaries + + permissions: + contents: read + + steps: + - name: Download assets + uses: actions/download-artifact@v7 + with: + name: assets + path: assets-dist + + - name: Download binary x86_64 + uses: actions/download-artifact@v7 + with: + name: binary-x86_64-unknown-linux-gnu + path: binary-x86_64 + + - name: 
Download binary aarch64 + uses: actions/download-artifact@v7 + with: + name: binary-aarch64-unknown-linux-gnu + path: binary-aarch64 + + - name: Create final archives + run: | + for arch in x86_64 aarch64; do + mkdir -p dist/${arch}/share + cp -r assets-dist/share/* dist/${arch}/share/ + cp assets-dist/LICENSE dist/${arch}/LICENSE + cp binary-$arch/mas-cli dist/${arch}/mas-cli + chmod -R u=rwX,go=rX dist/${arch}/ + chmod u=rwx,go=rx dist/${arch}/mas-cli + tar -czvf mas-cli-${arch}-linux.tar.gz --owner=0 --group=0 -C dist/${arch}/ . + done + + - name: Upload aarch64 archive + uses: actions/upload-artifact@v6.0.0 + with: + name: mas-cli-aarch64-linux + path: mas-cli-aarch64-linux.tar.gz + + - name: Upload x86_64 archive + uses: actions/upload-artifact@v6.0.0 + with: + name: mas-cli-x86_64-linux + path: mas-cli-x86_64-linux.tar.gz + + build-image: + name: Build and push Docker image + if: github.event_name == 'push' || github.event.label.name == 'Z-Build-Workflow' + runs-on: ubuntu-24.04 + + outputs: + metadata: ${{ steps.output.outputs.metadata }} + + permissions: + contents: read + packages: write + id-token: write + + needs: + - compute-version + + env: + VERGEN_GIT_DESCRIBE: ${{ needs.compute-version.outputs.describe }} + SOURCE_DATE_EPOCH: ${{ needs.compute-version.outputs.timestamp }} + + steps: + - name: Docker meta + id: meta + uses: docker/metadata-action@v5.10.0 + with: + images: "${{ env.IMAGE }}" + bake-target: docker-metadata-action + flavor: | + latest=auto + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=sha + + - name: Docker meta (debug variant) + id: meta-debug + uses: docker/metadata-action@v5.10.0 + with: + images: "${{ env.IMAGE }}" + bake-target: docker-metadata-action-debug + flavor: | + latest=auto + suffix=-debug,onlatest=true + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + 
type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=sha + + - name: Setup Cosign + uses: sigstore/cosign-installer@v4.0.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.12.0 + with: + buildkitd-config-inline: | + [registry."docker.io"] + mirrors = ["mirror.gcr.io"] + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3.7.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + id: bake + uses: docker/bake-action@v6.10.0 + with: + files: | + ./docker-bake.hcl + cwd://${{ steps.meta.outputs.bake-file }} + cwd://${{ steps.meta-debug.outputs.bake-file }} + set: | + base.output=type=image,push=true + base.cache-from=type=registry,ref=${{ env.BUILDCACHE }}:buildcache + base.cache-to=type=registry,ref=${{ env.BUILDCACHE }}:buildcache,mode=max + + - name: Transform bake output + # This transforms the ouput to an object which looks like this: + # { reguar: { digest: "…", tags: ["…", "…"] }, debug: { digest: "…", tags: ["…"] }, … } + id: output + run: | + echo 'metadata<> $GITHUB_OUTPUT + echo '${{ steps.bake.outputs.metadata }}' | jq -c 'with_entries(select(.value | (type == "object" and has("containerimage.digest")))) | map_values({ digest: .["containerimage.digest"], tags: (.["image.name"] | split(",")) })' >> $GITHUB_OUTPUT + echo 'EOF' >> $GITHUB_OUTPUT + + - name: Sign the images with GitHub Actions provided token + # Only sign on tags and on commits on main branch + if: | + github.event_name != 'pull_request' + && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') + + env: + REGULAR_DIGEST: ${{ steps.output.outputs.metadata && fromJSON(steps.output.outputs.metadata).regular.digest }} + DEBUG_DIGEST: ${{ steps.output.outputs.metadata && fromJSON(steps.output.outputs.metadata).debug.digest }} + + run: |- + cosign sign --yes \ + "$IMAGE@$REGULAR_DIGEST" \ + "$IMAGE@$DEBUG_DIGEST" \ + + 
release: + name: Release + if: startsWith(github.ref, 'refs/tags/') + runs-on: ubuntu-24.04 + needs: + - assemble-archives + - build-image + steps: + - name: Download the artifacts from the previous job + uses: actions/download-artifact@v7 + with: + pattern: mas-cli-* + path: artifacts + merge-multiple: true + + - name: Prepare a release + uses: softprops/action-gh-release@v2.5.0 + with: + generate_release_notes: true + body: | + ### Docker image + + Regular image: + + - Digest: + ``` + ${{ env.IMAGE }}@${{ fromJSON(needs.build-image.outputs.metadata).regular.digest }} + ``` + - Tags: + ``` + ${{ join(fromJSON(needs.build-image.outputs.metadata).regular.tags, ' + ') }} + ``` + + Debug variant: + + - Digest: + ``` + ${{ env.IMAGE }}@${{ fromJSON(needs.build-image.outputs.metadata).debug.digest }} + ``` + - Tags: + ``` + ${{ join(fromJSON(needs.build-image.outputs.metadata).debug.tags, ' + ') }} + ``` + + files: | + artifacts/mas-cli-aarch64-linux.tar.gz + artifacts/mas-cli-x86_64-linux.tar.gz + draft: true + + unstable: + name: Update the unstable release + if: github.ref == 'refs/heads/main' + runs-on: ubuntu-24.04 + + needs: + - assemble-archives + - build-image + + permissions: + contents: write + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + sparse-checkout: | + .github/scripts + + - name: Download the artifacts from the previous job + uses: actions/download-artifact@v7 + with: + pattern: mas-cli-* + path: artifacts + merge-multiple: true + + - name: Update unstable git tag + uses: actions/github-script@v8.0.0 + with: + script: | + const script = require('./.github/scripts/update-unstable-tag.cjs'); + await script({ core, github, context }); + + - name: Update unstable release + uses: softprops/action-gh-release@v2.5.0 + with: + name: "Unstable build" + tag_name: unstable + body: | + This is an automatically updated unstable release containing the latest builds from the main branch. 
+ + **⚠️ Warning: These are development builds and may be unstable.** + + Last updated: ${{ github.event.head_commit.timestamp }} + Commit: ${{ github.sha }} + + ### Docker image + + Regular image: + + - Digest: + ``` + ${{ env.IMAGE }}@${{ fromJSON(needs.build-image.outputs.metadata).regular.digest }} + ``` + - Tags: + ``` + ${{ join(fromJSON(needs.build-image.outputs.metadata).regular.tags, ' + ') }} + ``` + + Debug variant: + + - Digest: + ``` + ${{ env.IMAGE }}@${{ fromJSON(needs.build-image.outputs.metadata).debug.digest }} + ``` + - Tags: + ``` + ${{ join(fromJSON(needs.build-image.outputs.metadata).debug.tags, ' + ') }} + ``` + + files: | + artifacts/mas-cli-aarch64-linux.tar.gz + artifacts/mas-cli-x86_64-linux.tar.gz + prerelease: true + make_latest: false + + pr-cleanup: + name: "Remove workflow build PR label and comment on it" + runs-on: ubuntu-24.04 + if: github.event_name == 'pull_request' && github.event.label.name == 'Z-Build-Workflow' + + needs: + - build-image + + permissions: + contents: read + pull-requests: write + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + sparse-checkout: | + .github/scripts + + - name: Remove label and comment + uses: actions/github-script@v8.0.0 + env: + BUILD_IMAGE_MANIFEST: ${{ needs.build-image.outputs.metadata }} + with: + script: | + const script = require('./.github/scripts/cleanup-pr.cjs'); + await script({ core, github, context }); diff --git a/matrix-authentication-service/.github/workflows/ci.yaml b/matrix-authentication-service/.github/workflows/ci.yaml new file mode 100644 index 00000000..ba90f2be --- /dev/null +++ b/matrix-authentication-service/.github/workflows/ci.yaml @@ -0,0 +1,338 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: CI + +on: + push: + branches: + - main + - "release/**" + tags: + - "v*" + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + opa-lint: + name: Lint and test OPA policies + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - uses: ./.github/actions/build-policies + + - name: Setup Regal + uses: StyraInc/setup-regal@v1 + with: + # Keep in sync with policies/Makefile + version: 0.38.1 + + - name: Lint policies + working-directory: ./policies + run: make lint + + - name: Run OPA tests + working-directory: ./policies + run: make test + + frontend-lint: + name: Check frontend style + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Node dependencies + working-directory: ./frontend + run: npm ci + + - name: Lint + working-directory: ./frontend + run: npm run lint + + frontend-test: + name: Run the frontend test suite + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Node dependencies + working-directory: ./frontend + run: npm ci + + - name: Test + working-directory: ./frontend + run: npm test + + frontend-knip: + name: Check the frontend for unused dependencies + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Node dependencies + working-directory: ./frontend + 
run: npm ci + + - name: Check for unused dependencies + working-directory: ./frontend + run: npm run knip + + rustfmt: + name: Check Rust style + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@nightly + with: + components: rustfmt + + - name: Check style + run: cargo fmt --all -- --check + + cargo-deny: + name: Run `cargo deny` checks + runs-on: ubuntu-24.04 + + env: + # We need to remove the sccache wrapper because we don't install it in this job + RUSTC_WRAPPER: "" + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Run `cargo-deny` + uses: EmbarkStudios/cargo-deny-action@v2.0.15 + with: + rust-version: stable + + check-schema: + name: Check schema + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + run: | + rustup toolchain install stable + rustup default stable + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - uses: ./.github/actions/build-frontend + + - name: Update the schemas + run: sh ./misc/update.sh + + - name: Check that the workspace is clean + run: | + if ! 
[[ -z $(git status -s) ]]; then + echo "::error title=Workspace is not clean::Please run 'sh ./misc/update.sh' and commit the changes" + + ( + echo '## Diff after running `sh ./misc/update.sh`:' + echo + echo '```diff' + git diff + echo '```' + ) >> $GITHUB_STEP_SUMMARY + + exit 1 + fi + + clippy: + name: Run Clippy + needs: [rustfmt, opa-lint] + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@1.93.0 + with: + components: clippy + + - uses: ./.github/actions/build-policies + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Run clippy + run: | + cargo clippy --workspace --tests --bins --lib -- -D warnings + + compile-test-artifacts: + name: Compile test artifacts + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: cargo-nextest + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Build and archive tests + run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst + env: + SQLX_OFFLINE: "1" + + - name: Upload archive to workflow + uses: actions/upload-artifact@v6.0.0 + with: + name: nextest-archive + path: nextest-archive.tar.zst + + test: + name: Run test suite with Rust stable + needs: [rustfmt, opa-lint, compile-test-artifacts] + runs-on: ubuntu-24.04 + + permissions: + contents: read + + strategy: + matrix: + partition: [1, 2, 3] + + services: + postgres: + image: docker.io/library/postgres:15.3 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - "5432:5432" + + steps: + - name: 
Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: cargo-nextest + + - uses: ./.github/actions/build-frontend + - uses: ./.github/actions/build-policies + + - name: Download archive + uses: actions/download-artifact@v7 + with: + name: nextest-archive + + - name: Test + env: + DATABASE_URL: postgresql://postgres:postgres@localhost/postgres + run: | + ~/.cargo/bin/cargo-nextest nextest run \ + --archive-file nextest-archive.tar.zst \ + --partition count:${{ matrix.partition }}/3 + + tests-done: + name: Tests done + if: ${{ always() }} + needs: + - opa-lint + - frontend-lint + - frontend-test + - frontend-knip + - rustfmt + - cargo-deny + - clippy + - check-schema + - test + runs-on: ubuntu-24.04 + + steps: + - uses: matrix-org/done-action@v3 + with: + needs: ${{ toJSON(needs) }} diff --git a/matrix-authentication-service/.github/workflows/coverage.yaml b/matrix-authentication-service/.github/workflows/coverage.yaml new file mode 100644 index 00000000..4f03bd3c --- /dev/null +++ b/matrix-authentication-service/.github/workflows/coverage.yaml @@ -0,0 +1,139 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Coverage + +on: + push: + branches: [main] + pull_request: + branches: [main] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: false + +env: + CARGO_TERM_COLOR: always + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + +jobs: + opa: + name: Run OPA test suite with coverage + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - uses: ./.github/actions/build-policies + + - name: Run OPA tests with coverage + working-directory: ./policies + run: make coverage + + - name: Upload to codecov.io + uses: codecov/codecov-action@v5.5.2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: policies/coverage.json + flags: policies + + frontend: + name: Run frontend test suite with coverage + runs-on: ubuntu-24.04 + + permissions: + id-token: write + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - uses: ./.github/actions/build-frontend + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + - name: Test + working-directory: ./frontend + run: npm run coverage + + - name: Upload to codecov.io + uses: codecov/codecov-action@v5.5.2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: frontend/coverage/ + flags: frontend + + rust: + name: Run Rust test suite with coverage + runs-on: ubuntu-24.04 + + permissions: + contents: read + + env: + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + + services: + postgres: + image: docker.io/library/postgres:15.3 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - "5432:5432" + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + with: + components: llvm-tools-preview + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + 
+ - name: Install grcov + uses: taiki-e/install-action@v2 + with: + tool: grcov + + - uses: ./.github/actions/build-frontend + - uses: ./.github/actions/build-policies + + - name: Run test suite with profiling enabled + run: | + cargo test --no-fail-fast --workspace + env: + RUSTFLAGS: "-Cinstrument-coverage --cfg tokio_unstable" + LLVM_PROFILE_FILE: "cargo-test-%p-%m.profraw" + DATABASE_URL: postgresql://postgres:postgres@localhost/postgres + SQLX_OFFLINE: "1" + + - name: Build grcov report + run: | + mkdir -p target/coverage + grcov . --binary-path ./target/debug/deps/ -s . -t lcov --branch --ignore-not-existing --ignore '../*' --ignore "/*" -o target/coverage/tests.lcov + + - name: Upload to codecov.io + uses: codecov/codecov-action@v5.5.2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: target/coverage/*.lcov + flags: unit diff --git a/matrix-authentication-service/.github/workflows/docs.yaml b/matrix-authentication-service/.github/workflows/docs.yaml new file mode 100644 index 00000000..6b0ec68b --- /dev/null +++ b/matrix-authentication-service/.github/workflows/docs.yaml @@ -0,0 +1,77 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Build and deploy the documentation + +on: + push: + branches: [main] + pull_request: + branches: [main] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + +jobs: + build: + name: Build the documentation + runs-on: ubuntu-24.04 + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Setup sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Install mdbook + uses: taiki-e/install-action@v2 + with: + tool: mdbook + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Build the documentation + run: sh misc/build-docs.sh + + - name: Fix permissions + run: | + chmod -c -R +rX "target/book/" | while read line; do + echo "::warning title=Invalid file permissions automatically fixed::$line" + done + + - name: Upload GitHub Pages artifacts + uses: actions/upload-pages-artifact@v4.0.0 + with: + path: target/book/ + + deploy: + name: Deploy the documentation on GitHub Pages + runs-on: ubuntu-24.04 + needs: build + if: github.ref == 'refs/heads/main' + + permissions: + pages: write + id-token: write + + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4.0.5 diff --git a/matrix-authentication-service/.github/workflows/merge-back.yaml b/matrix-authentication-service/.github/workflows/merge-back.yaml new file mode 100644 index 00000000..8239442f --- /dev/null +++ b/matrix-authentication-service/.github/workflows/merge-back.yaml @@ -0,0 +1,40 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Merge back a reference to main +on: + workflow_call: + inputs: + sha: + required: true + type: string + secrets: + BOT_GITHUB_TOKEN: + required: true + +jobs: + merge-back: + name: Merge back the reference to main + runs-on: ubuntu-24.04 + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + sparse-checkout: | + .github/scripts + + - name: Push branch and open a PR + uses: actions/github-script@v8.0.0 + env: + SHA: ${{ inputs.sha }} + with: + github-token: ${{ secrets.BOT_GITHUB_TOKEN }} + script: | + const script = require('./.github/scripts/merge-back.cjs'); + await script({ core, github, context }); diff --git a/matrix-authentication-service/.github/workflows/release-branch.yaml b/matrix-authentication-service/.github/workflows/release-branch.yaml new file mode 100644 index 00000000..0b46539b --- /dev/null +++ b/matrix-authentication-service/.github/workflows/release-branch.yaml @@ -0,0 +1,123 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Create a new release branch +on: + workflow_dispatch: + inputs: + kind: + description: Kind of release (major = v1.2.3 -> v2.0.0-rc.0, minor = v1.2.3 -> v1.3.0-rc.0) + required: true + type: choice + default: minor + options: + - major + - minor + +jobs: + compute-version: + name: Compute the next ${{ inputs.kind }} RC version + runs-on: ubuntu-24.04 + + permissions: + contents: read + + outputs: + full: ${{ steps.next.outputs.full }} + short: ${{ steps.next.outputs.short }} + + steps: + - name: Fail the workflow if this is not the main branch + if: ${{ github.ref_name != 'main' }} + run: exit 1 + + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Compute the new minor RC + id: next + env: + BUMP: pre${{ inputs.kind }} + run: | + CURRENT_VERSION="$(cargo metadata --format-version 1 --no-deps | jq -r '.packages[] | select(.name == "mas-cli") | .version')" + NEXT_VERSION="$(npx --yes semver@7.5.4 -i "$BUMP" --preid rc "${CURRENT_VERSION}")" + # compute the short minor version, e.g. 0.1.0-rc.1 -> 0.1 + SHORT_VERSION="$(echo "${NEXT_VERSION}" | cut -d. 
-f1-2)" + echo "full=${NEXT_VERSION}" >> "$GITHUB_OUTPUT" + echo "short=${SHORT_VERSION}" >> "$GITHUB_OUTPUT" + + localazy: + name: Create a new branch in Localazy + runs-on: ubuntu-24.04 + needs: [compute-version] + + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Localazy CLI + run: npm install -g @localazy/cli + + - name: Create a new branch in Localazy + run: localazy branch -w "$LOCALAZY_WRITE_KEY" create main "$BRANCH" + env: + LOCALAZY_WRITE_KEY: ${{ secrets.LOCALAZY_WRITE_KEY }} + # Localazy doesn't like slashes in branch names, so we just use the short version + # For example, a 0.13.0 release will create a localazy branch named "v0.13" and a git branch named "release/v0.13" + BRANCH: v${{ needs.compute-version.outputs.short }} + + tag: + uses: ./.github/workflows/tag.yaml + needs: [compute-version] + with: + version: ${{ needs.compute-version.outputs.full }} + secrets: + BOT_GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }} + + merge-back: + uses: ./.github/workflows/merge-back.yaml + needs: [tag] + with: + sha: ${{ needs.tag.outputs.sha }} + secrets: + BOT_GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }} + + branch: + name: Create a new release branch + runs-on: ubuntu-24.04 + + permissions: + contents: write + pull-requests: write + + needs: [tag, compute-version, localazy] + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + sparse-checkout: | + .github/scripts + + - name: Create a new release branch + uses: actions/github-script@v8.0.0 + env: + BRANCH: release/v${{ needs.compute-version.outputs.short }} + SHA: ${{ needs.tag.outputs.sha }} + with: + github-token: ${{ secrets.BOT_GITHUB_TOKEN }} + script: | + const script = require('./.github/scripts/create-release-branch.cjs'); + await script({ core, github, context }); diff --git 
a/matrix-authentication-service/.github/workflows/release-bump.yaml b/matrix-authentication-service/.github/workflows/release-bump.yaml new file mode 100644 index 00000000..a2a20791 --- /dev/null +++ b/matrix-authentication-service/.github/workflows/release-bump.yaml @@ -0,0 +1,93 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +name: Bump the version on a release branch +on: + workflow_dispatch: + inputs: + rc: + description: "Is it a release candidate?" + type: boolean + default: false + merge-back: + description: "Should we merge back the release branch to main?" + type: boolean + default: true + +jobs: + compute-version: + name: Compute the next version + runs-on: ubuntu-24.04 + + permissions: + contents: read + + outputs: + version: ${{ steps.next.outputs.version }} + + steps: + - name: Fail the workflow if not on a release branch + if: ${{ !startsWith(github.ref_name, 'release/v') }} + run: exit 1 + + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Extract the current version + id: current + run: echo "version=$(cargo metadata --format-version 1 --no-deps | jq -r '.packages[] | select(.name == "mas-cli") | .version')" >> "$GITHUB_OUTPUT" + + - name: Compute the new minor RC + id: next + env: + BUMP: ${{ inputs.rc && 'prerelease' || 'patch' }} + VERSION: ${{ steps.current.outputs.version }} + run: echo "version=$(npx --yes semver@7.5.4 -i "$BUMP" --preid rc "$VERSION")" >> "$GITHUB_OUTPUT" + + tag: + uses: ./.github/workflows/tag.yaml + needs: [compute-version] + with: + version: ${{ needs.compute-version.outputs.version }} + secrets: + BOT_GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }} + + merge-back: + uses: ./.github/workflows/merge-back.yaml + needs: [tag] + if: inputs.merge-back + with: + sha: ${{ needs.tag.outputs.sha }} + 
secrets: + BOT_GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }} + + update-branch: + name: Update the release branch + runs-on: ubuntu-24.04 + + permissions: + pull-requests: write + + needs: [tag, compute-version] + steps: + - name: Checkout the code + uses: actions/checkout@v6 + with: + sparse-checkout: | + .github/scripts + + - name: Update the release branch + uses: actions/github-script@v8.0.0 + env: + BRANCH: "${{ github.ref_name }}" + SHA: ${{ needs.tag.outputs.sha }} + with: + github-token: ${{ secrets.BOT_GITHUB_TOKEN }} + script: | + const script = require('./.github/scripts/update-release-branch.cjs'); + await script({ core, github, context }); diff --git a/matrix-authentication-service/.github/workflows/tag.yaml b/matrix-authentication-service/.github/workflows/tag.yaml new file mode 100644 index 00000000..c6c394c8 --- /dev/null +++ b/matrix-authentication-service/.github/workflows/tag.yaml @@ -0,0 +1,71 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +name: Tag a new version +on: + workflow_call: + inputs: + version: + required: true + type: string + outputs: + sha: + description: "The SHA of the commit made which bumps the version" + value: ${{ jobs.tag.outputs.sha }} + secrets: + BOT_GITHUB_TOKEN: + required: true + +jobs: + tag: + name: Tag a new version + runs-on: ubuntu-24.04 + permissions: + contents: write + + outputs: + sha: ${{ fromJSON(steps.commit.outputs.result).commit }} + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Set the crates version + env: + VERSION: ${{ inputs.version }} + run: | + sed -i "s/^package.version = .*/package.version = \"$VERSION\"/" Cargo.toml + sed -i "/path = \".\/crates\//s/version = \".*\"/version = \"=$VERSION\"/" Cargo.toml + + - name: Run `cargo metadata` to make sure the lockfile is up to date + run: cargo metadata --format-version 1 + + - name: Commit and tag using the GitHub API + uses: actions/github-script@v8.0.0 + id: commit + env: + VERSION: ${{ inputs.version }} + with: + # Commit & tag with the actions token, so that they get signed + # This returns the commit sha and the tag object sha + script: | + const script = require('./.github/scripts/commit-and-tag.cjs'); + return await script({ core, github, context }); + + - name: Update the refs + uses: actions/github-script@v8.0.0 + env: + VERSION: ${{ inputs.version }} + TAG_SHA: ${{ fromJSON(steps.commit.outputs.result).tag }} + COMMIT_SHA: ${{ fromJSON(steps.commit.outputs.result).commit }} + with: + # Update the refs with the bot token, so that workflows are triggered + github-token: ${{ secrets.BOT_GITHUB_TOKEN }} + script: | + const script = require('./.github/scripts/create-version-tag.cjs'); + await script({ core, github, context }); diff --git a/matrix-authentication-service/.github/workflows/translations-download.yaml b/matrix-authentication-service/.github/workflows/translations-download.yaml new file 
mode 100644 index 00000000..e964bf37 --- /dev/null +++ b/matrix-authentication-service/.github/workflows/translations-download.yaml @@ -0,0 +1,63 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +name: Download translation files from Localazy +on: + workflow_dispatch: + +jobs: + download: + runs-on: ubuntu-24.04 + permissions: + contents: write + + steps: + - name: Fail the workflow if not on the main branch or a release branch + if: ${{ !(startsWith(github.ref_name, 'release/v') || github.ref_name == 'main') }} + run: exit 1 + + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Localazy CLI + run: npm install -g @localazy/cli + + - name: Compute the Localazy branch name + id: branch + # This will strip the "release/" prefix if present, keeping 'main' as-is + run: echo "name=${GITHUB_REF_NAME#release/}" >> "$GITHUB_OUTPUT" + + - name: Download translations from Localazy + run: localazy download -w "$LOCALAZY_WRITE_KEY" -b "$BRANCH" + env: + LOCALAZY_WRITE_KEY: ${{ secrets.LOCALAZY_WRITE_KEY }} + BRANCH: ${{ steps.branch.outputs.name }} + + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v8.1.0 + with: + sign-commits: true + token: ${{ secrets.BOT_GITHUB_TOKEN }} + branch-token: ${{ secrets.GITHUB_TOKEN }} + branch: actions/localazy-download/${{ steps.branch.outputs.name }} + delete-branch: true + title: Translations updates for ${{ steps.branch.outputs.name }} + labels: | + T-Task + A-I18n + commit-message: Translations updates + + - name: Enable automerge + run: gh pr merge --merge --auto "$PR_NUMBER" + if: steps.cpr.outputs.pull-request-operation == 'created' + env: + GH_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }} + PR_NUMBER: ${{ steps.cpr.outputs.pull-request-number }} diff --git 
a/matrix-authentication-service/.github/workflows/translations-upload.yaml b/matrix-authentication-service/.github/workflows/translations-upload.yaml new file mode 100644 index 00000000..453a0d46 --- /dev/null +++ b/matrix-authentication-service/.github/workflows/translations-upload.yaml @@ -0,0 +1,41 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +name: Upload translation files to Localazy +on: + push: + branches: + - main + - release/v** + +jobs: + upload: + runs-on: ubuntu-24.04 + permissions: + contents: read + + steps: + - name: Checkout the code + uses: actions/checkout@v6 + + - name: Install Node + uses: actions/setup-node@v6.2.0 + with: + node-version: 24 + + - name: Install Localazy CLI + run: npm install -g @localazy/cli + + - name: Compute the Localazy branch name + id: branch + run: | + # This will strip the "release/" prefix if present, keeping 'main' as-is + echo "name=${GITHUB_REF_NAME#release/}" >> "$GITHUB_OUTPUT" + + - name: Upload translations to Localazy + run: localazy upload -w "$LOCALAZY_WRITE_KEY" -b "$BRANCH" + env: + LOCALAZY_WRITE_KEY: ${{ secrets.LOCALAZY_WRITE_KEY }} + BRANCH: ${{ steps.branch.outputs.name }} diff --git a/matrix-authentication-service/.gitignore b/matrix-authentication-service/.gitignore new file mode 100644 index 00000000..d98402c2 --- /dev/null +++ b/matrix-authentication-service/.gitignore @@ -0,0 +1,14 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +# Rust +target + +# Editors +.idea +.nova + +# OS garbage +.DS_Store diff --git a/matrix-authentication-service/.rustfmt.toml b/matrix-authentication-service/.rustfmt.toml new file mode 100644 index 00000000..72a97f56 --- /dev/null +++ b/matrix-authentication-service/.rustfmt.toml @@ -0,0 +1,11 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +max_width = 100 +comment_width = 80 +wrap_comments = true +imports_granularity = "Crate" +use_small_heuristics = "Default" +group_imports = "StdExternalCrate" diff --git a/matrix-authentication-service/CONTRIBUTING.md b/matrix-authentication-service/CONTRIBUTING.md new file mode 100644 index 00000000..356a702c --- /dev/null +++ b/matrix-authentication-service/CONTRIBUTING.md @@ -0,0 +1,5 @@ +# Contributing to MAS + +Thank you for taking the time to contribute to Matrix! + +Please see the [contributors' guide](https://element-hq.github.io/matrix-authentication-service/development/contributing.html) in our rendered documentation. diff --git a/matrix-authentication-service/Cargo.lock b/matrix-authentication-service/Cargo.lock new file mode 100644 index 00000000..496fe0fd --- /dev/null +++ b/matrix-authentication-service/Cargo.lock @@ -0,0 +1,7919 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "addr2line" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "ahash" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + 
+[[package]] +name = "aide" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6966317188cdfe54c58c0900a195d021294afb3ece9b7073d09e4018dbb1e3a2" +dependencies = [ + "aide-macros", + "axum", + "axum-extra", + "bytes", + "cfg-if", + "http", + "indexmap 2.11.4", + "schemars 0.9.0", + "serde", + "serde_json", + "serde_qs", + "thiserror 2.0.17", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "aide-macros" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f2a08f14808f3c46f3e3004b727bace64af44c3c5996d0480a14d3852b1b25a" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" 
+version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" +dependencies = [ + "windows-sys 0.60.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.60.2", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" + +[[package]] +name = "arc-swap" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73" +dependencies = [ + "rustversion", +] + +[[package]] +name = "argon2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures", + "password-hash", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "as_variant" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dbc3a507a82b17ba0d98f6ce8fd6954ea0c8152e98009d36a40d8dcc8ce078a" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.5.0", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-graphql" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "036618f842229ba0b89652ffe425f96c7c16a49f7e3cb23b56fca7f61fd74980" +dependencies = [ + "async-graphql-derive", + "async-graphql-parser", + "async-graphql-value", + "async-stream", + "async-trait", + "base64", + "bytes", + "chrono", + "fnv", + 
"futures-timer", + "futures-util", + "http", + "indexmap 2.11.4", + "mime", + "multer", + "num-traits", + "pin-project-lite", + "regex", + "serde", + "serde_json", + "serde_urlencoded", + "static_assertions_next", + "thiserror 1.0.69", + "tracing", + "tracing-futures", + "url", +] + +[[package]] +name = "async-graphql-derive" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd45deb3dbe5da5cdb8d6a670a7736d735ba65b455328440f236dfb113727a3d" +dependencies = [ + "Inflector", + "async-graphql-parser", + "darling", + "proc-macro-crate", + "proc-macro2", + "quote", + "strum", + "syn", + "thiserror 1.0.69", +] + +[[package]] +name = "async-graphql-parser" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b7607e59424a35dadbc085b0d513aa54ec28160ee640cf79ec3b634eba66d3" +dependencies = [ + "async-graphql-value", + "pest", + "serde", + "serde_json", +] + +[[package]] +name = "async-graphql-value" +version = "7.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ecdaff7c9cffa3614a9f9999bf9ee4c3078fe3ce4d6a6e161736b56febf2de" +dependencies = [ + "bytes", + "indexmap 2.11.4", + "serde", + "serde_json", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +dependencies = [ + "event-listener 5.4.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" +dependencies = [ + "async-channel 2.5.0", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.4.1", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-signal" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-std" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "aws-lc-rs" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b8ff6c09cd57b16da53641caa860168b88c172a5ee163b0288d3d6eea12786" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e44d16778acaf6a9ec9899b92cebd65580b83f685446bf2e1f5d3d732f99dcd" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "axum" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18ed336352031311f4e0b4dd2ff392d4fbb370777c9d18d7fc9d7359f73871" +dependencies = [ + "axum-core", + "bytes", + "form_urlencoded", + "futures-util", + 
"http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-extra" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9963ff19f40c6102c76756ef0a46004c0d58957d87259fc9208ff8441c12ab96" +dependencies = [ + "axum", + "axum-core", + "bytes", + "cookie", + "form_urlencoded", + "futures-util", + "headers", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "serde_core", + "serde_html_form", + "serde_path_to_error", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "backtrace" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-link 0.2.1", +] + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" + +[[package]] +name = "bcrypt" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a0f5948f30df5f43ac29d310b7476793be97c50787e6ef4a63d960a0d0be827" +dependencies = [ + "base64", + "blowfish", + "getrandom 0.3.3", + "subtle", + "zeroize", +] + +[[package]] +name = "bindgen" +version = "0.72.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "serde_core", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" 
+dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel 2.5.0", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "blowfish" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" +dependencies = [ + "byteorder", + "cipher", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +dependencies = [ + "allocator-api2", +] + +[[package]] +name = "bytemuck" +version = "1.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +dependencies = [ + "serde", +] + +[[package]] +name = "calendrical_calculations" +version = 
"0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27ca2b6e2f7d75f43e001ded6f25e79b80bded5abbe764cbdf78c25a3051f4b" +dependencies = [ + "core_maths", + "displaydoc", +] + +[[package]] +name = "camino" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" +dependencies = [ + "serde_core", +] + +[[package]] +name = "castaway" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" +dependencies = [ + "rustversion", +] + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.2.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6354c81bbfd62d9cfa9cb3c773c2b7b2a3a482d569de977fd0e961f6e7c00583" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom 7.1.3", +] + +[[package]] +name = "cfg-if" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + 
+[[package]] +name = "chacha20" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "chacha20poly1305" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" +dependencies = [ + "aead", + "chacha20", + "cipher", + "poly1305", + "zeroize", +] + +[[package]] +name = "chrono" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link 0.2.1", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf", +] + +[[package]] +name = "chronoutil" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9b58b07a67cadda9502b270eca5e0f1cd3afd08445e0ab1d52d909db01b4543" +dependencies = [ + "chrono", +] + +[[package]] +name = "chumsky" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" +dependencies = [ + "hashbrown 0.14.5", + "stacker", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.5.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c2cfd7bf8a6017ddaa4e32ffe7403d547790db06bd171c1c53926faab501623" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4c05b9e80c5ccd3a7ef080ad7b6ba7d6fc00a985b8b157197075677c82c7a0" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + +[[package]] +name = "cobs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.17", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies 
= [ + "bytes", + "memchr", +] + +[[package]] +name = "compact_str" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "rustversion", + "ryu", + "static_assertions", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width 0.2.1", + "windows-sys 0.59.0", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "convert_case" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db05ffb6856bf0ecdf6367558a76a0e8a77b1713044eb92845c692100ed50190" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "aes-gcm", + "base64", + "hkdf", + "percent-encoding", + "rand 0.8.5", + "sha2", + "subtle", + "time", + "version_check", +] + +[[package]] +name = "cookie_store" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fc4bff745c9b4c7fb1e97b25d13153da2bc7796260141df62378998d070207f" +dependencies = [ + "cookie", + "document-features", + "idna", + "log", + "serde", + "serde_derive", + 
"serde_json", + "time", + "url", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core_maths" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77745e017f5edba1a9c1d854f6f3a52dac8a12dd5af5d2f54aecf61e43d80d30" +dependencies = [ + "libm", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "cranelift-assembler-x64" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08cdfa673abeaf2aa0634988468a751fbf5b3de612bd48c1bb36a3dc7e42fe44" +dependencies = [ + "cranelift-assembler-x64-meta", +] + +[[package]] +name = "cranelift-assembler-x64-meta" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744d5b84c226fe5dd5cc522552d2c69a55e1ea9f98e650b9075493d263698fca" +dependencies = [ + "cranelift-srcgen", +] + +[[package]] +name = "cranelift-bforest" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb9850ce67c4bdc5708204a24f3f571e1e933be2852ec785c778ad76e1f91a5e" +dependencies = [ + "cranelift-entity", +] + +[[package]] +name = "cranelift-bitset" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efa84e3a1dba026781d0a24761b072e03bbb404b8015f621d332457f627b3a19" +dependencies = [ + "serde", + 
"serde_derive", +] + +[[package]] +name = "cranelift-codegen" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cabfe32111207a68ddd237d184300789c6d650d47db0ff7c9c53ef48e347902" +dependencies = [ + "bumpalo", + "cranelift-assembler-x64", + "cranelift-bforest", + "cranelift-bitset", + "cranelift-codegen-meta", + "cranelift-codegen-shared", + "cranelift-control", + "cranelift-entity", + "cranelift-isle", + "gimli", + "hashbrown 0.15.5", + "log", + "pulley-interpreter", + "regalloc2", + "rustc-hash", + "serde", + "smallvec", + "target-lexicon", + "wasmtime-internal-math", +] + +[[package]] +name = "cranelift-codegen-meta" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdc5479395bb325f96e2e5e714e8c276b061c0eaa020525332bf16c6046a825" +dependencies = [ + "cranelift-assembler-x64-meta", + "cranelift-codegen-shared", + "cranelift-srcgen", + "heck 0.5.0", + "pulley-interpreter", +] + +[[package]] +name = "cranelift-codegen-shared" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678aea3a48ca54a38e1b057c253daf2ff4c2869b1e70af6545bee1475434b20d" + +[[package]] +name = "cranelift-control" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d5210a53058d2b2504269d168fb075f80f3921126dd27e593e726b6387413be" +dependencies = [ + "arbitrary", +] + +[[package]] +name = "cranelift-entity" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb138631be4325459938ea0507fb6001a9bbfe6022ee130423acbd8583c47244" +dependencies = [ + "cranelift-bitset", + "serde", + "serde_derive", +] + +[[package]] +name = "cranelift-frontend" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c087396f79a0cdcdd38c7adc1e9955ba3022d026afb9f08769f0c13795d1b6b" +dependencies = [ + "cranelift-codegen", + "log", + 
"smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-isle" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52eaa7b30b2a2d85f177790227f8f7a9b76d35da96302ef28fb394e588e3530b" + +[[package]] +name = "cranelift-native" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92c68cefa46cc4e37728d0789a11744dc619a5bd96cabbe44cb9d8dcacc20134" +dependencies = [ + "cranelift-codegen", + "libc", + "target-lexicon", +] + +[[package]] +name = "cranelift-srcgen" +version = "0.127.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9092860471c4562c18ea1e47f446072795ad344a4a01f7d0f8cee445390d545" + +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cron" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5877d3fbf742507b66bc2a1945106bd30dd8504019d596901ddd012a4dd01740" +dependencies = [ + "chrono", + "once_cell", + "winnow 0.6.26", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "typenum", +] + +[[package]] +name = "csv" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde_core", +] + +[[package]] +name = "csv-core" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" +dependencies = [ + "memchr", +] + +[[package]] +name = "ctr" +version 
= "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "deadpool" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b" +dependencies = [ + "deadpool-runtime", + "lazy_static", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d630bccd429a5bb5a64b5e94f693bfc48c9f8566418fda4c494cc94f911f87cc" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "dialoguer" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" +dependencies = [ + "console", + "fuzzy-matcher", + "shell-words", + "thiserror 1.0.69", + "zeroize", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] 
+ +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "document-features" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +dependencies = [ + "litrs", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "duration-str" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb333721800c025e363e902b293040778f8ac79913db4f013abf1f1d7d382fd7" +dependencies = [ + "rust_decimal", + "thiserror 2.0.17", + "winnow 0.7.13", +] + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pem-rfc7468", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "email-encoding" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9298e6504d9b9e780ed3f7dfd43a61be8cd0e09eb07f7706a945b0072b6670b6" +dependencies = [ + "base64", + "memchr", +] + +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" + +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + +[[package]] +name = "embedded-io" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener 5.4.1", + "pin-project-lite", +] + +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fancy-regex" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" +dependencies = [ + "bit-set", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "ff" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "figment" +version = "0.10.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" +dependencies = [ + "atomic", + "parking_lot", + "pear", + "serde", + "serde_yaml", + "tempfile", + "uncased", + "version_check", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" + +[[package]] +name = "fixed_decimal" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0febbeb1118a9ecdee6e4520ead6b54882e843dd0592ad233247dbee84c53db8" +dependencies = [ + "displaydoc", + "smallvec", + "writeable", +] + +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "spin", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = 
"0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fuzzy-matcher" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" +dependencies = [ + "thread_local", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi 
0.11.1+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi 0.14.7+wasi-0.2.4", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" +dependencies = [ + "fallible-iterator", + "indexmap 2.11.4", + "stable_deref_trait", +] + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "governor" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444405bbb1a762387aa22dd569429533b54a1d8759d35d3b64cb39b0293eaa19" +dependencies = [ + "cfg-if", + "dashmap", + "futures-sink", + "futures-timer", + "futures-util", + "hashbrown 0.15.5", + "nonzero_ext", + "parking_lot", + "portable-atomic", + "quanta", + "smallvec", + "spinning_top", + "web-time", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + 
"rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap 2.11.4", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", + "serde", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "headers" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" +dependencies = [ + "base64", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "hostname" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56f203cd1c76362b69e3863fd987520ac36cf70a8c92627449b2f64a8cf7d65" +dependencies = [ + "cfg-if", + "libc", + "windows-link 0.1.3", +] + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_calendar" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7265b2137f9a36f7634a308d91f984574bbdba8cfd95ceffe1c345552275a8ff" +dependencies = [ + "calendrical_calculations", + "displaydoc", + "icu_calendar_data", + "icu_locid", + "icu_locid_transform", + "icu_provider", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_calendar_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "820499e77e852162190608b4f444e7b4552619150eafc39a9e39333d9efae9e1" + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_datetime" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d115efb85e08df3fd77e77f52e7e087545a783fffba8be80bfa2102f306b1780" +dependencies = [ + "displaydoc", + "either", + "fixed_decimal", + "icu_calendar", + "icu_datetime_data", + "icu_decimal", + "icu_locid", + "icu_locid_transform", + "icu_plurals", + "icu_provider", + "icu_timezone", + "smallvec", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_datetime_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef5f04076123cab1b7a926a7083db27fe0d7a0e575adb984854aae3f3a6507d" + +[[package]] +name = "icu_decimal" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb8fd98f86ec0448d85e1edf8884e4e318bb2e121bd733ec929a05c0a5e8b0eb" +dependencies = [ + "displaydoc", + "fixed_decimal", + "icu_decimal_data", + "icu_locid_transform", + "icu_provider", + "writeable", +] + +[[package]] +name = "icu_decimal_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67c95dd97f5ccf6d837a9c115496ec7d36646fa86ca18e7f1412115b4c820ae2" + +[[package]] +name = "icu_experimental" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "844ad7b682a165c758065d694bc4d74ac67f176da1c499a04d85d492c0f193b7" +dependencies = [ + "displaydoc", + "fixed_decimal", + "icu_collections", + "icu_decimal", + "icu_experimental_data", + "icu_locid", + "icu_locid_transform", + "icu_normalizer", + "icu_pattern", + "icu_plurals", + "icu_properties", + "icu_provider", + "litemap", + "num-bigint", + "num-rational", + "num-traits", + "smallvec", + "tinystr", + "writeable", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_experimental_data" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121df92eafb8f5286d4e8ff401c1e7db8384377f806db3f8db77b91e5b7bd4dd" + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" + +[[package]] +name = "icu_pattern" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f36aafd098d6717de34e668a8120822275c1fba22b936e757b7de8a2fd7e4" +dependencies = [ + "displaydoc", + "either", + "writeable", + "yoke", + "zerofrom", +] + +[[package]] +name = "icu_plurals" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a70e7c025dbd5c501b0a5c188cd11666a424f0dadcd4f0a95b7dafde3b114" +dependencies = [ + "displaydoc", + "fixed_decimal", + "icu_locid_transform", + "icu_plurals_data", + "icu_provider", + 
"zerovec", +] + +[[package]] +name = "icu_plurals_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a483403238cb7d6a876a77a5f8191780336d80fe7b8b00bfdeb20be6abbfd112" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_adapters" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6324dfd08348a8e0374a447ebd334044d766b1839bb8d5ccf2482a99a77c0bc" +dependencies = [ + "icu_locid", + "icu_locid_transform", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "icu_timezone" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa91ba6a585939a020c787235daa8aee856d9bceebd6355e283c0c310bc6de96" +dependencies = [ + "displaydoc", + "icu_calendar", + 
"icu_provider", + "icu_timezone_data", + "tinystr", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_timezone_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1adcf7b613a268af025bc2a2532b4b9ee294e6051c5c0832d8bff20ac0232e68" + +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +dependencies = [ + "equivalent", + "hashbrown 0.15.5", + "serde", + "serde_core", +] + +[[package]] +name = "indoc" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" + +[[package]] +name = "inherent" +version 
= "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c727f80bfa4a6c6e2508d2f05b6f4bfce242030bd88ed15ae5331c5b5d30fba7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "inlinable_string" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "insta" +version = "1.46.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4" +dependencies = [ + "console", + "once_cell", + "serde", + "similar", + "tempfile", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "ipnetwork" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf466541e9d546596ee94f9f69590f89473455f88372423e0008fc1a7daf100e" +dependencies = [ + "serde", +] + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0b063578492ceec17683ef2f8c5e89121fbd0b172cbc280635ab7567db2738" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "js_int" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d937f95470b270ce8b8950207715d71aa8e153c0d44c6684d59397ed4949160a" +dependencies = [ + "serde", +] + +[[package]] +name = "json-patch" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "159294d661a039f7644cea7e4d844e6b25aaf71c1ffe9d73a96d768c24b0faf4" 
+dependencies = [ + "jsonptr", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "jsonptr" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5a3cc660ba5d72bce0b3bb295bf20847ccbb40fd423f3f05b61273672e561fe" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "k256" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" +dependencies = [ + "cfg-if", + "ecdsa", + "elliptic-curve", + "once_cell", + "sha2", + "signature", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" +dependencies = [ + "serde", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "lettre" +version = "0.11.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e13e10e8818f8b2a60f52cb127041d388b89f3a96a62be9ceaffa22262fef7f" +dependencies = [ + "async-std", + "async-trait", + "base64", + "chumsky", + "email-encoding", + "email_address", + "fastrand", + "futures-io", + "futures-util", + "hostname", + "httpdate", + "idna", + "mime", + "nom 8.0.0", + "percent-encoding", + "quoted_printable", + 
"rustls", + "rustls-platform-verifier", + "socket2", + "tokio", + "tokio-rustls", + "tracing", + "url", +] + +[[package]] +name = "libc" +version = "0.2.180" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" + +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets 0.53.3", +] + +[[package]] +name = "libm" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" + +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +dependencies = [ + "bitflags", + "libc", + "redox_syscall", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "listenfd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b87bc54a4629b4294d0b3ef041b64c40c611097a677d9dc07b2c67739fe39dba" +dependencies = [ + "libc", + "uuid", + "winapi", +] + +[[package]] +name = "litemap" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" + +[[package]] +name = "litrs" +version = "0.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +dependencies = [ + "value-bag", +] + +[[package]] +name = "mach2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" +dependencies = [ + "libc", +] + +[[package]] +name = "mas-axum-utils" +version = "1.12.0" +dependencies = [ + "anyhow", + "axum", + "axum-extra", + "base64ct", + "chrono", + "headers", + "http", + "icu_locid", + "mas-data-model", + "mas-http", + "mas-iana", + "mas-jose", + "mas-keystore", + "mas-storage", + "mas-templates", + "mime", + "oauth2-types", + "rand 0.8.5", + "reqwest", + "sentry", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.17", + "tokio", + "tracing", + "ulid", + "url", +] + +[[package]] +name = "mas-cli" +version = "1.12.0" +dependencies = [ + "anyhow", + "axum", + "bytes", + "camino", + "chrono", + "clap", + "console", + "dialoguer", + "dotenvy", + "figment", + "futures-util", + "headers", + "http-body-util", + "hyper", + "ipnetwork", + "itertools 0.14.0", + "listenfd", + "mas-config", + "mas-context", + "mas-data-model", + "mas-email", + "mas-handlers", + "mas-http", + "mas-i18n", + "mas-keystore", + "mas-listener", + "mas-matrix", + "mas-matrix-synapse", + "mas-policy", + "mas-router", + "mas-storage", + "mas-storage-pg", + "mas-tasks", + "mas-templates", + "mas-tower", + "opentelemetry", + "opentelemetry-http", + 
"opentelemetry-instrumentation-process", + "opentelemetry-instrumentation-tokio", + "opentelemetry-jaeger-propagator", + "opentelemetry-otlp", + "opentelemetry-prometheus-text-exporter", + "opentelemetry-resource-detectors", + "opentelemetry-semantic-conventions", + "opentelemetry-stdout", + "opentelemetry_sdk", + "rand 0.8.5", + "rand_chacha 0.3.1", + "reqwest", + "rustls", + "sd-notify", + "sentry", + "sentry-tower", + "sentry-tracing", + "serde_json", + "serde_yaml", + "sqlx", + "syn2mas", + "tokio", + "tokio-util", + "tower", + "tower-http", + "tracing", + "tracing-appender", + "tracing-opentelemetry", + "tracing-subscriber", + "url", + "vergen-gitcl", + "zeroize", +] + +[[package]] +name = "mas-config" +version = "1.12.0" +dependencies = [ + "anyhow", + "camino", + "chrono", + "figment", + "futures-util", + "governor", + "hex", + "indoc", + "ipnetwork", + "lettre", + "mas-iana", + "mas-jose", + "mas-keystore", + "pem-rfc7468", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rustls-pki-types", + "schemars 0.9.0", + "serde", + "serde_json", + "serde_with", + "tokio", + "tracing", + "ulid", + "url", +] + +[[package]] +name = "mas-context" +version = "1.12.0" +dependencies = [ + "console", + "opentelemetry", + "pin-project-lite", + "quanta", + "tokio", + "tower-layer", + "tower-service", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "mas-data-model" +version = "1.12.0" +dependencies = [ + "base64ct", + "chrono", + "crc", + "lettre", + "mas-iana", + "mas-jose", + "oauth2-types", + "rand 0.8.5", + "rand_chacha 0.3.1", + "regex", + "ruma-common", + "serde", + "serde_json", + "thiserror 2.0.17", + "ulid", + "url", + "woothee", +] + +[[package]] +name = "mas-email" +version = "1.12.0" +dependencies = [ + "async-trait", + "lettre", + "mas-templates", + "thiserror 2.0.17", + "tracing", +] + +[[package]] +name = "mas-handlers" +version = "1.12.0" +dependencies = [ + "aide", + "anyhow", + "argon2", + "async-graphql", + "async-trait", + 
"axum", + "axum-extra", + "axum-macros", + "base64ct", + "bcrypt", + "camino", + "chrono", + "cookie_store", + "elliptic-curve", + "futures-util", + "governor", + "headers", + "hex", + "hyper", + "icu_normalizer", + "indexmap 2.11.4", + "insta", + "lettre", + "mas-axum-utils", + "mas-config", + "mas-context", + "mas-data-model", + "mas-email", + "mas-http", + "mas-i18n", + "mas-iana", + "mas-jose", + "mas-keystore", + "mas-matrix", + "mas-oidc-client", + "mas-policy", + "mas-router", + "mas-storage", + "mas-storage-pg", + "mas-tasks", + "mas-templates", + "mime", + "minijinja", + "minijinja-contrib", + "oauth2-types", + "opentelemetry", + "opentelemetry-semantic-conventions", + "pbkdf2", + "pkcs8", + "psl", + "rand 0.8.5", + "rand_chacha 0.3.1", + "reqwest", + "rustls", + "schemars 0.9.0", + "sentry", + "serde", + "serde_json", + "serde_urlencoded", + "serde_with", + "sha2", + "sqlx", + "thiserror 2.0.17", + "tokio", + "tokio-util", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", + "ulid", + "url", + "wiremock", + "zeroize", + "zxcvbn", +] + +[[package]] +name = "mas-http" +version = "1.12.0" +dependencies = [ + "futures-util", + "headers", + "http", + "hyper-util", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-semantic-conventions", + "reqwest", + "rustls", + "rustls-platform-verifier", + "tokio", + "tower", + "tower-http", + "tracing", + "tracing-opentelemetry", +] + +[[package]] +name = "mas-i18n" +version = "1.12.0" +dependencies = [ + "camino", + "icu_calendar", + "icu_datetime", + "icu_experimental", + "icu_locid", + "icu_locid_transform", + "icu_plurals", + "icu_provider", + "icu_provider_adapters", + "pad", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror 2.0.17", + "writeable", +] + +[[package]] +name = "mas-i18n-scan" +version = "1.12.0" +dependencies = [ + "camino", + "clap", + "mas-i18n", + "minijinja", + "serde_json", + "tracing", + "tracing-subscriber", + "walkdir", +] + +[[package]] +name = "mas-iana" 
+version = "1.12.0" +dependencies = [ + "schemars 0.9.0", + "serde", +] + +[[package]] +name = "mas-iana-codegen" +version = "1.12.0" +dependencies = [ + "anyhow", + "async-trait", + "camino", + "convert_case", + "csv", + "reqwest", + "rustls", + "serde", + "tokio", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "mas-jose" +version = "1.12.0" +dependencies = [ + "base64ct", + "chrono", + "digest", + "ecdsa", + "elliptic-curve", + "generic-array", + "hmac", + "insta", + "k256", + "mas-iana", + "p256", + "p384", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rsa", + "schemars 0.9.0", + "sec1", + "serde", + "serde_json", + "serde_with", + "sha2", + "signature", + "thiserror 2.0.17", + "url", +] + +[[package]] +name = "mas-keystore" +version = "1.12.0" +dependencies = [ + "aead", + "base64ct", + "chacha20poly1305", + "const-oid", + "der", + "elliptic-curve", + "generic-array", + "insta", + "k256", + "mas-iana", + "mas-jose", + "p256", + "p384", + "pem-rfc7468", + "pkcs1", + "pkcs8", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rsa", + "sec1", + "spki", + "thiserror 2.0.17", +] + +[[package]] +name = "mas-listener" +version = "1.12.0" +dependencies = [ + "anyhow", + "bytes", + "futures-util", + "http-body", + "hyper", + "hyper-util", + "mas-context", + "pin-project-lite", + "socket2", + "thiserror 2.0.17", + "tokio", + "tokio-rustls", + "tokio-test", + "tokio-util", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "mas-matrix" +version = "1.12.0" +dependencies = [ + "anyhow", + "async-trait", + "ruma-common", + "tokio", +] + +[[package]] +name = "mas-matrix-synapse" +version = "1.12.0" +dependencies = [ + "anyhow", + "async-trait", + "http", + "mas-http", + "mas-matrix", + "reqwest", + "serde", + "thiserror 2.0.17", + "tracing", + "url", + "urlencoding", +] + +[[package]] +name = "mas-oidc-client" +version = "1.12.0" +dependencies = [ + "assert_matches", + "async-trait", + "base64ct", + "bitflags", + "chrono", + 
"elliptic-curve", + "form_urlencoded", + "headers", + "http", + "http-body-util", + "language-tags", + "mas-http", + "mas-iana", + "mas-jose", + "mas-keystore", + "mime", + "oauth2-types", + "p256", + "rand 0.8.5", + "rand_chacha 0.3.1", + "reqwest", + "rustls", + "serde", + "serde_json", + "serde_urlencoded", + "thiserror 2.0.17", + "tokio", + "tracing", + "url", + "wiremock", +] + +[[package]] +name = "mas-policy" +version = "1.12.0" +dependencies = [ + "anyhow", + "arc-swap", + "mas-data-model", + "oauth2-types", + "opa-wasm", + "schemars 0.9.0", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tracing", +] + +[[package]] +name = "mas-router" +version = "1.12.0" +dependencies = [ + "axum", + "serde", + "serde_urlencoded", + "ulid", + "url", +] + +[[package]] +name = "mas-spa" +version = "1.12.0" +dependencies = [ + "camino", + "serde", + "thiserror 2.0.17", +] + +[[package]] +name = "mas-storage" +version = "1.12.0" +dependencies = [ + "async-trait", + "chrono", + "futures-util", + "mas-data-model", + "mas-iana", + "mas-jose", + "oauth2-types", + "opentelemetry", + "rand_core 0.6.4", + "serde", + "serde_json", + "thiserror 2.0.17", + "tracing", + "tracing-opentelemetry", + "ulid", + "url", +] + +[[package]] +name = "mas-storage-pg" +version = "1.12.0" +dependencies = [ + "async-trait", + "chrono", + "crc", + "futures-util", + "mas-data-model", + "mas-iana", + "mas-jose", + "mas-storage", + "oauth2-types", + "opentelemetry", + "opentelemetry-semantic-conventions", + "rand 0.8.5", + "rand_chacha 0.3.1", + "sea-query", + "sea-query-binder", + "serde_json", + "sha2", + "sqlx", + "thiserror 2.0.17", + "tokio", + "tracing", + "ulid", + "url", + "uuid", +] + +[[package]] +name = "mas-tasks" +version = "1.12.0" +dependencies = [ + "anyhow", + "async-trait", + "chrono", + "cron", + "mas-context", + "mas-data-model", + "mas-email", + "mas-i18n", + "mas-matrix", + "mas-router", + "mas-storage", + "mas-storage-pg", + "mas-templates", + "opentelemetry", + 
"opentelemetry-semantic-conventions", + "rand 0.8.5", + "rand_chacha 0.3.1", + "serde", + "serde_json", + "sqlx", + "thiserror 2.0.17", + "tokio", + "tokio-util", + "tracing", + "tracing-opentelemetry", + "ulid", +] + +[[package]] +name = "mas-templates" +version = "1.12.0" +dependencies = [ + "anyhow", + "arc-swap", + "camino", + "chrono", + "http", + "mas-data-model", + "mas-i18n", + "mas-iana", + "mas-policy", + "mas-router", + "mas-spa", + "minijinja", + "minijinja-contrib", + "oauth2-types", + "rand 0.8.5", + "rand_chacha 0.3.1", + "serde", + "serde_json", + "serde_urlencoded", + "thiserror 2.0.17", + "tokio", + "tracing", + "ulid", + "url", + "v_htmlescape", + "walkdir", +] + +[[package]] +name = "mas-tower" +version = "1.12.0" +dependencies = [ + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-semantic-conventions", + "pin-project-lite", + "tower", + "tracing", + "tracing-opentelemetry", +] + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "memfd" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad38eb12aea514a0466ea40a80fd8cc83637065948eb4a426e4aa46261175227" 
+dependencies = [ + "rustix", +] + +[[package]] +name = "memo-map" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d1115007560874e373613744c6fba374c17688327a71c1476d1a5954cc857b" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minijinja" +version = "2.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b479616bb6f0779fb0f3964246beda02d4b01144e1b0d5519616e012ccc2a245" +dependencies = [ + "memo-map", + "percent-encoding", + "self_cell", + "serde", + "serde_json", + "v_htmlescape", +] + +[[package]] +name = "minijinja-contrib" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "182ba1438db4679ddfa03792c183bdc2b9ce26b58e7d41a749e59b06497cf136" +dependencies = [ + "minijinja", + "serde", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = 
"multer" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http", + "httparse", + "memchr", + "mime", + "spin", + "version_check", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + +[[package]] +name = "nu-ansi-term" +version = "0.50.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + +[[package]] +name = "oauth2-types" +version = "1.12.0" +dependencies = [ + "assert_matches", + "base64ct", + "chrono", + "indexmap 2.11.4", + "insta", + "language-tags", + "mas-iana", + "mas-jose", + "serde", + "serde_json", + "serde_with", + "sha2", + "thiserror 2.0.17", + "url", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "crc32fast", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" + +[[package]] +name = "opa-wasm" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360f7106079ceb334afe98119289eec2ae5c0865f270a04f550e7f56a9bd507c" +dependencies = [ + "anyhow", + "base64", + "chrono", + "chrono-tz", + "chronoutil", + "digest", + "duration-str", + "form_urlencoded", + "hex", + "hmac", + "json-patch", + "md-5", + "parse-size", + "rand 0.8.5", + "rayon", + "semver", + "serde", + "serde_json", + "serde_yaml", + "sha1", + "sha2", + "sprintf", + "thiserror 2.0.17", + "tokio", + "tracing", + "urlencoding", + "version_check", + "wasmtime", +] + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "opentelemetry" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.17", + "tracing", +] + +[[package]] +name = "opentelemetry-http" +version = "0.31.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-instrumentation-process" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a81f1738bbdcf58eae20bd85ab0973f9c1f700ba37789e3253dc4e7ba61855c" +dependencies = [ + "opentelemetry", + "opentelemetry-semantic-conventions", + "procfs", +] + +[[package]] +name = "opentelemetry-instrumentation-tokio" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fbe5cbcb32e33e68fd7a3a60d47c3b0a00cd40a4d012cec2f862f463dc0c296" +dependencies = [ + "opentelemetry", + "tokio", +] + +[[package]] +name = "opentelemetry-jaeger-propagator" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3bbd907f151104a112f749f3b8387ef669b7264e0bb80546ea0700a3b307b7" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf" +dependencies = [ + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "thiserror 2.0.17", +] + +[[package]] +name = "opentelemetry-prometheus-text-exporter" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897906366b17a89bec845f6051e0c3474049402a09a0711eea180941293bd013" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "smartstring", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f" +dependencies = [ + 
"opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", + "tonic-prost", +] + +[[package]] +name = "opentelemetry-resource-detectors" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e82845106cf72d47c141cee7f0d95e0650d8f28c6222a1f1ae727a8883899c19" +dependencies = [ + "opentelemetry", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e62e29dfe041afb8ed2a6c9737ab57db4907285d999ef8ad3a59092a36bdc846" + +[[package]] +name = "opentelemetry-stdout" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc8887887e169414f637b18751487cce4e095be787d23fad13c454e2fb1b3811" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd" +dependencies = [ + "futures-channel", + "futures-executor", + "futures-util", + "opentelemetry", + "percent-encoding", + "rand 0.9.2", + "thiserror 2.0.17", + "tokio", + "tokio-stream", +] + +[[package]] +name = "os_info" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0e1ac5fde8d43c34139135df8ea9ee9465394b2d8d20f032d38998f64afffc3" +dependencies = [ + "log", + "plist", + "serde", + "windows-sys 0.52.0", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "p384" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "pad" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ad9b889f1b12e0b9ee24db044b5129150d5eada288edc800f789928dc8c0e3" +dependencies = [ + "unicode-width 0.1.14", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "parse-size" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487f2ccd1e17ce8c1bfab3a65c89525af41cfad4c8659021a1e9a2aacd73b89b" + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest", + "hmac", + "password-hash", + "rayon", + "sha2", +] + +[[package]] +name = "pear" +version = "0.2.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" +dependencies = [ + "inlinable_string", + "pear_codegen", + "yansi", +] + +[[package]] +name = "pear_codegen" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" +dependencies = [ + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pest" +version = "2.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" +dependencies = [ + "memchr", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" +dependencies = [ + "pest", + "sha2", +] + +[[package]] +name = "phf" +version = 
"0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs5" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e847e2c91a18bfa887dd028ec33f2fe6f25db77db3619024764914affe8b69a6" +dependencies = [ + "aes", + "cbc", + "der", + "pbkdf2", + "scrypt", + "sha2", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "pkcs5", + "rand_core 0.6.4", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plist" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" +dependencies = [ + "base64", + "indexmap 2.11.4", + "quick-xml", + "serde", + "time", +] + +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "postcard" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" +dependencies = [ + "cobs", + "embedded-io 0.4.0", + "embedded-io 0.6.1", + "serde", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro-crate" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + 
"proc-macro2", + "quote", + "syn", + "version_check", + "yansi", +] + +[[package]] +name = "procfs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25485360a54d6861439d60facef26de713b1e126bf015ec8f98239467a2b82f7" +dependencies = [ + "bitflags", + "procfs-core", + "rustix", +] + +[[package]] +name = "procfs-core" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6401bf7b6af22f78b563665d15a22e9aef27775b79b149a66ca022468a4e405" +dependencies = [ + "bitflags", + "hex", +] + +[[package]] +name = "prost" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "psl" +version = "2.1.162" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9031bc0c03e389af72e52f11bc07869fbcc357f1b0774f9e2b3bce085a393416" +dependencies = [ + "psl-types", +] + +[[package]] +name = "psl-types" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" + +[[package]] +name = "psm" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e944464ec8536cd1beb0bbfd96987eb5e3b72f2ecdafdc5c769a37f1fa2ae1f" +dependencies = [ + "cc", +] + +[[package]] +name = "pulley-interpreter" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63350fc565d2b7ab7f610d0655b28f5f80348658c2cf33d05d7ec43356c4be3c" +dependencies = [ + 
"cranelift-bitset", + "log", + "pulley-macros", + "wasmtime-internal-math", +] + +[[package]] +name = "pulley-macros" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d46b2339b894fed7983d91e5723c40e4bc593cb78cd86ffac0798d29f21372e0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi 0.11.1+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + +[[package]] +name = "quick-xml" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" +dependencies = [ + "memchr", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "quoted_printable" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "640c9bd8497b02465aeef5375144c26062e0dcd5939dfcbb0f5db76cb8c17c73" + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "raw-cpuid" +version = "11.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "regalloc2" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e249c660440317032a71ddac302f25f1d5dff387667bcc3978d1f77aa31ac34" +dependencies = [ + "allocator-api2", + "bumpalo", + "hashbrown 0.15.5", + "log", + "rustc-hash", + "smallvec", +] + +[[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + 
"encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "mime", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsa" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "ruma-common" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac7f59b9f7639667d0d6ae3ae242c8912e9ed061cea1fbaf72710a402e83b53e" +dependencies = [ + "as_variant", + "base64", + "bytes", + "form_urlencoded", + "indexmap 2.11.4", + "js_int", + "percent-encoding", + "regex", + "ruma-identifiers-validation", + "ruma-macros", + "serde", + "serde_html_form", + "serde_json", + "thiserror 2.0.17", + "time", + "tracing", + "url", + "web-time", + "wildmatch", + "zeroize", +] + +[[package]] +name = 
"ruma-identifiers-validation" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14a7b93ac1e571c585f8fa5cef09c07bb8a15529775fd56b9a3eac4f9233dff2" +dependencies = [ + "js_int", + "thiserror 2.0.17", +] + +[[package]] +name = "ruma-macros" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c9911c7188517f28505d2d513339511d00e0f50cec5c2dde820cd0ec7e6a833" +dependencies = [ + "cfg-if", + "proc-macro-crate", + "proc-macro2", + "quote", + "ruma-identifiers-validation", + "serde", + "syn", + "toml", +] + +[[package]] +name = "rust_decimal" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8975fc98059f365204d635119cf9c5a60ae67b841ed49b5422a9a7e56cdfac0" +dependencies = [ + "arrayvec", + "num-traits", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" +dependencies 
= [ + "aws-lc-rs", + "log", + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pki-types" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-platform-verifier" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be59af91596cac372a6942530653ad0c3a246cdd491aaa9dcaee47f88d67d5a0" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.103.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "salsa20" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "chrono", + "dyn-clone", + "indexmap 2.11.4", + "ref-cast", + "schemars_derive", + "serde", + "serde_json", + "url", +] + +[[package]] +name = "schemars" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5016d94c77c6d32f0b8e08b781f7dc8a90c2007d4e77472cc2807bc10a8438fe" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scrypt" +version = "0.11.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f" +dependencies = [ + "pbkdf2", + "salsa20", + "sha2", +] + +[[package]] +name = "sd-notify" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b943eadf71d8b69e661330cb0e2656e31040acf21ee7708e2c238a0ec6af2bf4" +dependencies = [ + "libc", +] + +[[package]] +name = "sea-query" +version = "0.32.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a5d1c518eaf5eda38e5773f902b26ab6d5e9e9e2bb2349ca6c64cf96f80448c" +dependencies = [ + "chrono", + "inherent", + "sea-query-derive", + "uuid", +] + +[[package]] +name = "sea-query-binder" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0019f47430f7995af63deda77e238c17323359af241233ec768aba1faea7608" +dependencies = [ + "chrono", + "sea-query", + "sqlx", + "uuid", +] + +[[package]] +name = "sea-query-derive" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bae0cbad6ab996955664982739354128c58d16e126114fe88c2a493642502aab" +dependencies = [ + "darling", + "heck 0.4.1", + "proc-macro2", + "quote", + "syn", + "thiserror 2.0.17", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b369d18893388b345804dc0007963c99b7d665ae71d275812d828c6f089640" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "self_cell" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f7d95a54511e0c7be3f51e8867aa8cf35148d7b9445d44de2f943e2b206e749" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "sentry" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d92d893ba7469d361a6958522fa440e4e2bc8bf4c5803cd1bf40b9af63f8f9a8" +dependencies = [ + "cfg_aliases", + "httpdate", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-panic", + "sentry-tower", + "sentry-tracing", + "tokio", +] + +[[package]] +name = "sentry-backtrace" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f8784d0a27b5cd4b5f75769ffc84f0b7580e3c35e1af9cd83cb90b612d769cc" +dependencies = [ + "backtrace", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e5eb42f4cd4f9fdfec9e3b07b25a4c9769df83d218a7e846658984d5948ad3e" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0b1e7ca40f965db239da279bf278d87b7407469b98835f27f0c8e59ed189b06" +dependencies = [ + "rand 0.9.2", + "sentry-types", + "serde", + "serde_json", + "url", +] + +[[package]] +name = "sentry-panic" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8906f8be87aea5ac7ef937323fb655d66607427f61007b99b7cb3504dc5a156c" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-tower" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56aebe376310840b49dad4cca55c7b32d9abdc14946cd071d4158ecb149b63a4" +dependencies = [ + "axum", + "http", + "pin-project", + "sentry-core", + "tower-layer", + "tower-service", + "url", +] + +[[package]] +name = "sentry-tracing" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b07eefe04486316c57aba08ab53dd44753c25102d1d3fe05775cc93a13262d9" +dependencies = [ + "bitflags", + "sentry-backtrace", + "sentry-core", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "sentry-types" +version = "0.46.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567711f01f86a842057e1fc17779eba33a336004227e1a1e7e6cc2599e22e259" +dependencies = [ + "debugid", + "hex", + "rand 0.9.2", + "serde", + "serde_json", + "thiserror 2.0.17", + "time", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_html_form" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2f2d7ff8a2140333718bb329f5c40fc5f0865b84c426183ce14c97d2ab8154f" +dependencies = [ + "form_urlencoded", + "indexmap 2.11.4", + "itoa", + "ryu", + "serde_core", +] + +[[package]] +name = "serde_json" +version = "1.0.145" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +dependencies = [ + "indexmap 2.11.4", + "itoa", + "memchr", + "ryu", + "serde", + "serde_core", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + +[[package]] +name = "serde_qs" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b417bedc008acbdf6d6b4bc482d29859924114bbe2650b7921fb68a261d0aa6" +dependencies = [ + "axum", + "futures", + "percent-encoding", + "serde", + "thiserror 2.0.17", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +dependencies = [ + "base64", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.11.4", + "schemars 0.9.0", + "schemars 1.1.0", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.11.4", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] + +[[package]] +name = "smartstring" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" +dependencies = [ + "autocfg", + "static_assertions", + "version_check", +] + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sprintf" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78222247fc55e10208ed1ba60f8296390bc67a489bc27a36231765d8d6f60ec5" +dependencies = [ + "thiserror 2.0.17", +] + +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "either", + "event-listener 5.4.1", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink", + "indexmap 2.11.4", + "ipnetwork", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rustls", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", + "webpki-roots 0.26.11", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64", + "bitflags", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64", + "bitflags", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "ipnetwork", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + 
"tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" +dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "stacker" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "windows-sys 0.59.0", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "static_assertions_next" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7beae5182595e9a8b683fa98c4317f956c9a2dec3b9716990d20023cc60c766" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn2mas" +version = "1.12.0" +dependencies = [ + "anyhow", + "arc-swap", + "bitflags", + "camino", + "chrono", + "compact_str", + "figment", + "futures-util", + "insta", + "mas-config", + "mas-data-model", + "mas-iana", + "mas-storage", + "mas-storage-pg", + "oauth2-types", + "opentelemetry", + "opentelemetry-semantic-conventions", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rustc-hash", + "serde", + "serde_json", + "sqlx", + "thiserror 2.0.17", + "thiserror-ext", + "tokio", + "tokio-util", + "tracing", + "ulid", + "url", + "uuid", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", 
+] + +[[package]] +name = "target-lexicon" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" + +[[package]] +name = "tempfile" +version = "3.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", +] + +[[package]] +name = "thiserror-ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fb7e61141f4141832ca9aad63c3c90023843f944a1975460abdacc64d03f534" +dependencies = [ + "thiserror 2.0.17", + "thiserror-ext-derive", +] + +[[package]] +name = "thiserror-ext-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b5042dd3b562d1d57711be902006a0003fa2781b81d5b2bec07416be31586ff" +dependencies = [ + "either", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + "itoa", + "libc", + "num-conv", + "num_threads", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-test" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" +dependencies = [ + "async-stream", + "bytes", + "futures-core", + "tokio", + "tokio-stream", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.11.4", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.7.13", +] + +[[package]] +name = "tonic" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203" +dependencies = [ + "async-trait", + "base64", + "bytes", + "http", + "http-body", + "http-body-util", + "percent-encoding", + "pin-project", + "sync_wrapper", + "tokio-stream", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-prost" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67" +dependencies = [ + "bytes", + "prost", + "tonic", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-appender" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" +dependencies = [ + "crossbeam-channel", + "thiserror 2.0.17", + "time", + "tracing-subscriber", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "futures", + "futures-task", + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6e5658463dd88089aba75c7791e1d3120633b1bfde22478b28f625a9bb1b8e" +dependencies = [ + "js-sys", + "opentelemetry", + "opentelemetry_sdk", + "rustversion", + "thiserror 2.0.17", + "tracing", + "tracing-core", + "tracing-subscriber", + "web-time", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "ulid" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f294bff79170ed1c5633812aff1e565c35d993a36e757f9bc0accf5eec4e6045" +dependencies = [ + "rand 0.8.5", + "serde", + "uuid", + "web-time", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "v_htmlescape" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e8257fbc510f0a46eb602c10215901938b5c2a7d5e70fc11483b1d3c9b5b18c" + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "value-bag" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vergen" +version = "9.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b2bf58be11fc9414104c6d3a2e464163db5ef74b12296bda593cac37b6e4777" +dependencies = [ + "anyhow", + "derive_builder", + "rustc_version", + "rustversion", + "vergen-lib", +] + +[[package]] +name = "vergen-gitcl" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9dfc1de6eb2e08a4ddf152f1b179529638bedc0ea95e6d667c014506377aefe" +dependencies = [ + "anyhow", + "derive_builder", + "rustversion", + "time", + "vergen", + "vergen-lib", +] + +[[package]] +name = "vergen-lib" +version = "0.1.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b07e6010c0f3e59fcb164e0163834597da68d1f864e2b8ca49f74de01e9c166" +dependencies = [ + "anyhow", + "derive_builder", + "rustversion", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.7+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e14915cadd45b529bb8d1f343c4ed0ac1de926144b746e2710f9cd05df6603b" 
+dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28d1ba982ca7923fd01448d5c30c6864d0a14109560296a162f80f305fb93bb" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca85039a9b469b38336411d6d6ced91f3fc87109a2a27b0c197663f5144dffe" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c3d463ae3eff775b0c45df9da45d68837702ac35af998361e2c84e7c5ec1b0d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb4ce89b08211f923caf51d527662b75bdc9c9c7aab40f86dcb9fb85ac552aa" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f143854a3b13752c6950862c906306adb27c7e839f7414cec8fea35beab624c1" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.243.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c55db9c896d70bd9fa535ce83cd4e1f2ec3726b0edd2142079f594fc3be1cb35" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.243.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f6d8db401b0528ec316dfbe579e6ab4152d61739cfe076706d2009127970159d" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "semver", + "serde", +] + +[[package]] +name = "wasmprinter" +version = "0.243.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2b6035559e146114c29a909a3232928ee488d6507a1504d8934e8607b36d7b" +dependencies = [ + "anyhow", + "termcolor", + "wasmparser", +] + +[[package]] +name = "wasmtime" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f033059744520d5943887511a83731856a78a00f0dac943dc9e9d2292289cad" +dependencies = [ + "addr2line", + "anyhow", + "async-trait", + "bitflags", + "bumpalo", + "cc", + "cfg-if", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "libc", + "log", + "mach2", + "memfd", + "object", + "once_cell", + "postcard", + "pulley-interpreter", + "rayon", + "rustix", + "serde", + "serde_derive", + "smallvec", + "target-lexicon", + "wasmparser", + "wasmtime-environ", + "wasmtime-internal-component-macro", + "wasmtime-internal-cranelift", + "wasmtime-internal-fiber", + "wasmtime-internal-jit-debug", + "wasmtime-internal-jit-icache-coherence", + "wasmtime-internal-math", + "wasmtime-internal-slab", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-environ" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c6de1f26b145fbe9e6980b0495e1c855920091d31c0d1e32e7e49318211103" +dependencies = [ + "anyhow", + "cranelift-bitset", + "cranelift-entity", + "gimli", + "indexmap 2.11.4", + "log", + "object", + "postcard", + "serde", + "serde_derive", + "smallvec", + "target-lexicon", + "wasm-encoder", + "wasmparser", + "wasmprinter", +] + +[[package]] +name = "wasmtime-internal-component-macro" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5bc0996221d4e178c1b9286aa40e448af1cdf40a37f1a3f71755f0502a11eb23" +dependencies = [ + "anyhow", + "proc-macro2", + "quote", + "syn", + "wasmtime-internal-component-util", + "wasmtime-internal-wit-bindgen", + "wit-parser", +] + +[[package]] +name = "wasmtime-internal-component-util" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146d953836b26c44dc39173b00c5a783e9adcb4369460b2052169cd81e90e729" + +[[package]] +name = "wasmtime-internal-cranelift" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1fbd0cae8d129883a7bad7f2272e6662dcebf4e0f6b38539603359235959a" +dependencies = [ + "anyhow", + "cfg-if", + "cranelift-codegen", + "cranelift-control", + "cranelift-entity", + "cranelift-frontend", + "cranelift-native", + "gimli", + "itertools 0.14.0", + "log", + "object", + "pulley-interpreter", + "smallvec", + "target-lexicon", + "thiserror 2.0.17", + "wasmparser", + "wasmtime-environ", + "wasmtime-internal-math", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", +] + +[[package]] +name = "wasmtime-internal-fiber" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52a2a724a50b8ace66a6089002cbe99eec0f611a15c78262739b6aeb590ab252" +dependencies = [ + "anyhow", + "cc", + "cfg-if", + "libc", + "rustix", + "wasmtime-internal-versioned-export-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-internal-jit-debug" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f3ea5e264b8f6a3a91444ffee58449288e239c9d60c2483bf78c631f3269fa7" +dependencies = [ + "cc", + "wasmtime-internal-versioned-export-macros", +] + +[[package]] +name = "wasmtime-internal-jit-icache-coherence" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c161f4e0636998a68f2c2159260a0d8bbb2d2d2b762938f7be62b2ac0535ed4" 
+dependencies = [ + "anyhow", + "cfg-if", + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-internal-math" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55d246d16cad85ab49f6e76026d934df2f45974d97eb2ab837a6312dda4c76a" +dependencies = [ + "libm", +] + +[[package]] +name = "wasmtime-internal-slab" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e6e7127a10a3d38939c54fa3e1701512bd78340ec112ffc628c36516e38bd3a" + +[[package]] +name = "wasmtime-internal-unwinder" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a899f4006b6332a9312060c9216beaf58447da5939af8f19144138f59d6e366" +dependencies = [ + "anyhow", + "cfg-if", + "cranelift-codegen", + "log", + "object", +] + +[[package]] +name = "wasmtime-internal-versioned-export-macros" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "426e1088960ab200c49b8e5812667a442c705df018b5c57bd9b3cf80c12b0bdb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "wasmtime-internal-wit-bindgen" +version = "40.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bf7e687a48c5b82d81c59c8963a4f90ca495955170fab9c1bf75176fd1ba014" +dependencies = [ + "anyhow", + "bitflags", + "heck 0.5.0", + "indexmap 2.11.4", + "wit-parser", +] + +[[package]] +name = "web-sys" +version = "0.3.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e4b637749ff0d92b8fad63aa1f7cff3cbe125fd49c175cd6345e7272638b12" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" 
+version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4ffd8df1c57e87c325000a3d6ef93db75279dc3a231125aac571650f22b12a" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.2", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + +[[package]] +name = "wildmatch" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39b7d07a236abaef6607536ccfaf19b396dbe3f5110ddb73d39f4562902ed382" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.62.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57fe7168f7de578d2d8a05b07fd61870d2e73b4020e9f49aa00da8471723497c" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.2.1", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-result" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-strings" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.3", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link 0.1.3", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" 
+version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.6.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e90edd2ac1aa278a5c4599b1d89cf03074b610800f866d4026dc199d7929a28" +dependencies = [ + "memchr", +] + 
+[[package]] +name = "winnow" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +dependencies = [ + "memchr", +] + +[[package]] +name = "wiremock" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08db1edfb05d9b3c1542e521aea074442088292f00b5f28e435c714a98f85031" +dependencies = [ + "assert-json-diff", + "base64", + "deadpool", + "futures", + "http", + "http-body-util", + "hyper", + "hyper-util", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", + "url", +] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "wit-parser" +version = "0.243.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df983a8608e513d8997f435bb74207bf0933d0e49ca97aa9d8a6157164b9b7fc" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.11.4", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "woothee" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "896174c6a4779d4d7d4523dd27aef7d46609eda2497e370f6c998325c6bf6971" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +dependencies = [ + "either", +] + +[[package]] +name = "yansi" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb594dd55d87335c5f60177cee24f19457a5ec10a065e0a3014722ad252d0a1f" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zxcvbn" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad76e35b00ad53688d6b90c431cabe3cbf51f7a4a154739e04b63004ab1c736c" +dependencies = [ + "chrono", + "derive_builder", + "fancy-regex", + "itertools 0.13.0", + "lazy_static", + "regex", + "time", + "wasm-bindgen", + "web-sys", +] diff --git a/matrix-authentication-service/Cargo.toml b/matrix-authentication-service/Cargo.toml new file mode 100644 index 00000000..2b0ab4ae --- /dev/null +++ b/matrix-authentication-service/Cargo.toml @@ -0,0 +1,762 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[workspace] +default-members = ["crates/cli"] +members = ["crates/*"] +resolver = "2" + +# Updated in the CI with a `sed` command +package.version = "1.12.0" +package.license = "AGPL-3.0-only OR LicenseRef-Element-Commercial" +package.authors = ["Element Backend Team"] +package.edition = "2024" +package.homepage = "https://element-hq.github.io/matrix-authentication-service/" +package.repository = "https://github.com/element-hq/matrix-authentication-service/" +package.publish = false + +[workspace.lints.rust] +unsafe_code = "deny" + +[workspace.lints.clippy] +# We use groups as good defaults, but with a lower priority so that we can override them +all = { level = "deny", priority = -1 } +pedantic = { level = "warn", priority = -1 } + +str_to_string = "deny" +too_many_lines = "allow" + +[workspace.lints.rustdoc] +broken_intra_doc_links = "deny" + +[workspace.dependencies] + +# Workspace crates +mas-axum-utils = { path = "./crates/axum-utils/", version = "=1.12.0" } +mas-cli = { path = "./crates/cli/", version = "=1.12.0" } +mas-config = { path = "./crates/config/", version = "=1.12.0" } +mas-context = { path = "./crates/context/", version = "=1.12.0" } +mas-data-model = { path = "./crates/data-model/", version = "=1.12.0" } +mas-email = { path = "./crates/email/", version = "=1.12.0" } +mas-graphql = { path = "./crates/graphql/", version = "=1.12.0" } +mas-handlers = { path = "./crates/handlers/", version = "=1.12.0" } +mas-http = { path = "./crates/http/", version = "=1.12.0" } +mas-i18n = { path = "./crates/i18n/", version = "=1.12.0" } +mas-i18n-scan = { path = "./crates/i18n-scan/", version = "=1.12.0" } +mas-iana = { path = "./crates/iana/", version = "=1.12.0" } +mas-iana-codegen = { path = "./crates/iana-codegen/", version = "=1.12.0" } +mas-jose = { path = "./crates/jose/", version = "=1.12.0" } +mas-keystore = { path = "./crates/keystore/", version = "=1.12.0" } +mas-listener = { path = "./crates/listener/", version = "=1.12.0" } +mas-matrix = { path = 
"./crates/matrix/", version = "=1.12.0" } +mas-matrix-synapse = { path = "./crates/matrix-synapse/", version = "=1.12.0" } +mas-oidc-client = { path = "./crates/oidc-client/", version = "=1.12.0" } +mas-policy = { path = "./crates/policy/", version = "=1.12.0" } +mas-router = { path = "./crates/router/", version = "=1.12.0" } +mas-spa = { path = "./crates/spa/", version = "=1.12.0" } +mas-storage = { path = "./crates/storage/", version = "=1.12.0" } +mas-storage-pg = { path = "./crates/storage-pg/", version = "=1.12.0" } +mas-tasks = { path = "./crates/tasks/", version = "=1.12.0" } +mas-templates = { path = "./crates/templates/", version = "=1.12.0" } +mas-tower = { path = "./crates/tower/", version = "=1.12.0" } +oauth2-types = { path = "./crates/oauth2-types/", version = "=1.12.0" } +syn2mas = { path = "./crates/syn2mas", version = "=1.12.0" } + +# OpenAPI schema generation and validation +[workspace.dependencies.aide] +version = "0.15.1" +features = ["axum", "axum-extra", "axum-extra-query", "axum-json", "macros"] + +# An `Arc` that can be atomically updated +[workspace.dependencies.arc-swap] +version = "1.8.1" + +# GraphQL server +[workspace.dependencies.async-graphql] +version = "7.0.17" +default-features = false +features = ["chrono", "url", "tracing", "playground"] + +[workspace.dependencies.async-stream] +version = "0.3.6" + +# Utility to write and implement async traits +[workspace.dependencies.async-trait] +version = "0.1.89" + +# High-level error handling +[workspace.dependencies.anyhow] +version = "1.0.100" + +# Assert that a value matches a pattern +[workspace.dependencies.assert_matches] +version = "1.5.0" + +# HTTP router +[workspace.dependencies.axum] +version = "0.8.6" + +# Extra utilities for Axum +[workspace.dependencies.axum-extra] +version = "0.10.3" +features = ["cookie-private", "cookie-key-expansion", "typed-header", "query"] + +# Axum macros +[workspace.dependencies.axum-macros] +version = "0.5.0" + +# AEAD (Authenticated Encryption with 
Associated Data) +[workspace.dependencies.aead] +version = "0.5.2" +features = ["std"] + +# Argon2 password hashing +[workspace.dependencies.argon2] +version = "0.5.3" +features = ["password-hash", "std"] + +# Constant-time base64 +[workspace.dependencies.base64ct] +version = "1.8.0" +features = ["std"] + +# Bcrypt password hashing +[workspace.dependencies.bcrypt] +version = "0.18.0" +default-features = true + +# Packed bitfields +[workspace.dependencies.bitflags] +version = "2.10.0" + +# Bytes +[workspace.dependencies.bytes] +version = "1.10.1" + +# UTF-8 paths +[workspace.dependencies.camino] +version = "1.2.1" +features = ["serde1"] + +# ChaCha20Poly1305 AEAD +[workspace.dependencies.chacha20poly1305] +version = "0.10.1" +features = ["std"] + +# Memory optimisation for short strings +[workspace.dependencies.compact_str] +version = "0.9.0" + +# Terminal formatting +[workspace.dependencies.console] +version = "0.15.11" + +# Cookie store +[workspace.dependencies.cookie_store] +version = "0.22.0" +default-features = false +features = ["serde_json"] + +# Time utilities +[workspace.dependencies.chrono] +version = "0.4.42" +default-features = false +features = ["serde", "clock"] + +# CLI argument parsing +[workspace.dependencies.clap] +version = "4.5.50" +features = ["derive"] + +# Object Identifiers (OIDs) as constants +[workspace.dependencies.const-oid] +version = "0.9.6" +features = ["std"] + +# Utility for converting between different cases +[workspace.dependencies.convert_case] +version = "0.9.0" + +# CRC calculation +[workspace.dependencies.crc] +version = "3.3.0" + +# Cron expressions +[workspace.dependencies.cron] +version = "0.15.0" + +# CSV parsing and writing +[workspace.dependencies.csv] +version = "1.4.0" + +# DER encoding +[workspace.dependencies.der] +version = "0.7.10" +features = ["std"] + +# Interactive CLI dialogs +[workspace.dependencies.dialoguer] +version = "0.11.0" +default-features = false +features = ["fuzzy-select", "password"] + +# 
Cryptographic digest algorithms +[workspace.dependencies.digest] +version = "0.10.7" + +# Load environment variables from .env files +[workspace.dependencies.dotenvy] +version = "0.15.7" + +# ECDSA algorithms +[workspace.dependencies.ecdsa] +version = "0.16.9" +features = ["signing", "verifying"] + +# Elliptic curve cryptography +[workspace.dependencies.elliptic-curve] +version = "0.13.8" +features = ["std", "pem", "sec1"] + +# Configuration loading +[workspace.dependencies.figment] +version = "0.10.19" +features = ["env", "yaml", "test"] + +# URL form encoding +[workspace.dependencies.form_urlencoded] +version = "1.2.2" + +# Utilities for dealing with futures +[workspace.dependencies.futures-util] +version = "0.3.31" + +# Fixed-size arrays with trait implementations +[workspace.dependencies.generic-array] +version = "0.14.7" + +# Rate-limiting +[workspace.dependencies.governor] +version = "0.10.1" +default-features = false +features = ["std", "dashmap", "quanta"] + +# HMAC calculation +[workspace.dependencies.hmac] +version = "0.12.1" + +# HTTP headers +[workspace.dependencies.headers] +version = "0.4.1" + +# Hex encoding and decoding +[workspace.dependencies.hex] +version = "0.4.3" + +# HTTP request/response +[workspace.dependencies.http] +version = "1.3.1" + +# HTTP body trait +[workspace.dependencies.http-body] +version = "1.0.1" + +# http-body utilities +[workspace.dependencies.http-body-util] +version = "0.1.3" + +# HTTP client and server +[workspace.dependencies.hyper] +version = "1.7.0" +features = ["client", "server", "http1", "http2"] + +# Additional Hyper utilities +[workspace.dependencies.hyper-util] +version = "0.1.18" +features = [ + "client", + "server", + "server-auto", + "service", + "http1", + "http2", + "tokio", +] + +# Hyper Rustls support +[workspace.dependencies.hyper-rustls] +version = "0.27.7" +features = ["http1", "http2"] +default-features = false + +# ICU libraries for internationalization +[workspace.dependencies.icu_calendar] +version = 
"1.5.2" +features = ["compiled_data", "std"] +[workspace.dependencies.icu_datetime] +version = "1.5.1" +features = ["compiled_data", "std"] +[workspace.dependencies.icu_experimental] +version = "0.1.0" +features = ["compiled_data", "std"] +[workspace.dependencies.icu_locid] +version = "1.5.0" +features = ["std"] +[workspace.dependencies.icu_locid_transform] +version = "1.5.0" +features = ["compiled_data", "std"] +[workspace.dependencies.icu_normalizer] +version = "1.5.0" +[workspace.dependencies.icu_plurals] +version = "1.5.0" +features = ["compiled_data", "std"] +[workspace.dependencies.icu_provider] +version = "1.5.0" +features = ["std", "sync"] +[workspace.dependencies.icu_provider_adapters] +version = "1.5.0" +features = ["std"] + +# HashMap which preserves insertion order +[workspace.dependencies.indexmap] +version = "2.11.4" +features = ["serde"] + +# Indented string literals +[workspace.dependencies.indoc] +version = "2.0.6" + +# Snapshot testing +[workspace.dependencies.insta] +version = "1.46.3" +features = ["yaml", "json"] + +# IP network address types +[workspace.dependencies.ipnetwork] +version = "0.20.0" +features = ["serde"] + +# Iterator utilities +[workspace.dependencies.itertools] +version = "0.14.0" + +# K256 elliptic curve +[workspace.dependencies.k256] +version = "0.13.4" +features = ["std"] + +# RFC 5646 language tags +[workspace.dependencies.language-tags] +version = "0.3.2" +features = ["serde"] + +# Email sending +[workspace.dependencies.lettre] +version = "0.11.19" +default-features = false +features = [ + "tokio1-rustls", + "rustls-platform-verifier", + "aws-lc-rs", + "hostname", + "builder", + "tracing", + "pool", + "smtp-transport", + "sendmail-transport", +] + +# Listening on passed FDs +[workspace.dependencies.listenfd] +version = "1.0.2" + +# MIME type support +[workspace.dependencies.mime] +version = "0.3.17" + +# Templates +[workspace.dependencies.minijinja] +version = "2.15.1" +features = ["urlencode", "loader", "json", "speedups", 
"unstable_machinery"] + +# Additional filters for minijinja +[workspace.dependencies.minijinja-contrib] +version = "2.12.0" +features = ["pycompat"] + +# Utilities to deal with non-zero values +[workspace.dependencies.nonzero_ext] +version = "0.3.0" + +# Open Policy Agent support through WASM +[workspace.dependencies.opa-wasm] +version = "0.1.9" + +# OpenTelemetry +[workspace.dependencies.opentelemetry] +version = "0.31.0" +features = ["trace", "metrics"] +[workspace.dependencies.opentelemetry-http] +version = "0.31.0" +features = ["reqwest"] +[workspace.dependencies.opentelemetry-instrumentation-process] +version = "0.1.2" +[workspace.dependencies.opentelemetry-instrumentation-tokio] +version = "0.1.2" +[workspace.dependencies.opentelemetry-jaeger-propagator] +version = "0.31.0" +[workspace.dependencies.opentelemetry-otlp] +version = "0.31.0" +default-features = false +features = ["trace", "metrics", "http-proto"] +[workspace.dependencies.opentelemetry-prometheus-text-exporter] +version = "0.2.1" +[workspace.dependencies.opentelemetry-resource-detectors] +version = "0.10.0" +[workspace.dependencies.opentelemetry-semantic-conventions] +version = "0.31.0" +features = ["semconv_experimental"] +[workspace.dependencies.opentelemetry-stdout] +version = "0.31.0" +features = ["trace", "metrics"] +[workspace.dependencies.opentelemetry_sdk] +version = "0.31.0" +features = [ + "experimental_trace_batch_span_processor_with_async_runtime", + "experimental_metrics_periodicreader_with_async_runtime", + "rt-tokio", +] +[workspace.dependencies.tracing-opentelemetry] +version = "0.32.0" +default-features = false + +# P256 elliptic curve +[workspace.dependencies.p256] +version = "0.13.2" +features = ["std"] + +# P384 elliptic curve +[workspace.dependencies.p384] +version = "0.13.1" +features = ["std"] + +# Text padding utilities +[workspace.dependencies.pad] +version = "0.1.6" + +# PBKDF2 password hashing +[workspace.dependencies.pbkdf2] +version = "0.12.2" +features = 
["password-hash", "std", "simple", "parallel"] + +# PEM encoding/decoding +[workspace.dependencies.pem-rfc7468] +version = "0.7.0" +features = ["std"] + +# Parser generator +[workspace.dependencies.pest] +version = "2.8.3" + +# Pest derive macros +[workspace.dependencies.pest_derive] +version = "2.8.3" + +# Pin projection +[workspace.dependencies.pin-project-lite] +version = "0.2.16" + +# PKCS#1 encoding +[workspace.dependencies.pkcs1] +version = "0.7.5" +features = ["std"] + +# PKCS#8 encoding +[workspace.dependencies.pkcs8] +version = "0.10.2" +features = ["std", "pkcs5", "encryption"] + +# Public Suffix List +[workspace.dependencies.psl] +version = "2.1.162" + +# High-precision clock +[workspace.dependencies.quanta] +version = "0.12.6" + +# Random values +[workspace.dependencies.rand] +version = "0.8.5" +[workspace.dependencies.rand_chacha] +version = "0.3.1" +[workspace.dependencies.rand_core] +version = "0.6.4" + +# Regular expressions +[workspace.dependencies.regex] +version = "1.12.2" + +# High-level HTTP client +[workspace.dependencies.reqwest] +version = "0.12.24" +default-features = false +features = [ + "http2", + "rustls-tls-manual-roots-no-provider", + "charset", + "json", + "socks", +] + +# RSA cryptography +[workspace.dependencies.rsa] +version = "0.9.10" +features = ["std", "pem"] + +# Fast hash algorithm for HashMap +[workspace.dependencies.rustc-hash] +version = "2.1.1" + +# Matrix-related types +[workspace.dependencies.ruma-common] +version = "0.16.0" + +# TLS stack +[workspace.dependencies.rustls] +version = "0.23.35" + +# PKI types for rustls +[workspace.dependencies.rustls-pki-types] +version = "1.13.0" + +# Use platform-specific verifier for TLS +[workspace.dependencies.rustls-platform-verifier] +version = "0.6.1" + +# systemd service status notification +[workspace.dependencies.sd-notify] +version = "0.4.5" + +# JSON Schema generation +[workspace.dependencies.schemars] +version = "0.9.0" +features = ["url2", "chrono04", "preserve_order"] + 
+# SEC1 encoding format +[workspace.dependencies.sec1] +version = "0.7.3" +features = ["std"] + +# Query builder +[workspace.dependencies.sea-query] +version = "0.32.7" +features = ["derive", "attr", "with-uuid", "with-chrono", "postgres-array"] + +# Query builder +[workspace.dependencies.sea-query-binder] +version = "0.7.0" +features = [ + "sqlx", + "sqlx-postgres", + "with-uuid", + "with-chrono", + "postgres-array", +] + +# Sentry error tracking +[workspace.dependencies.sentry] +version = "0.46.2" +default-features = false +features = ["backtrace", "contexts", "panic", "tower", "reqwest"] + +# Sentry tower layer +[workspace.dependencies.sentry-tower] +version = "0.46.0" +features = ["http", "axum-matched-path"] + +# Sentry tracing integration +[workspace.dependencies.sentry-tracing] +version = "0.46.0" + +# Serialization and deserialization +[workspace.dependencies.serde] +version = "1.0.228" +features = ["derive"] # Most of the time, if we need serde, we need derive + +# JSON serialization and deserialization +[workspace.dependencies.serde_json] +version = "1.0.145" +features = ["preserve_order"] + +# URL encoded form serialization +[workspace.dependencies.serde_urlencoded] +version = "0.7.1" + +# Custom serialization helpers +[workspace.dependencies.serde_with] +version = "3.14.0" +features = ["hex", "chrono"] + +# YAML serialization +[workspace.dependencies.serde_yaml] +version = "0.9.34" + +# SHA-2 cryptographic hash algorithm +[workspace.dependencies.sha2] +version = "0.10.9" +features = ["oid"] + +# Digital signature traits +[workspace.dependencies.signature] +version = "2.2.0" + +# Low-level socket manipulation +[workspace.dependencies.socket2] +version = "0.6.2" + +# Subject Public Key Info +[workspace.dependencies.spki] +version = "0.7.3" +features = ["std"] + +# SQL database support +[workspace.dependencies.sqlx] +version = "0.8.6" +features = [ + "runtime-tokio", + "tls-rustls-aws-lc-rs", + "postgres", + "migrate", + "chrono", + "json", + "uuid", + 
"ipnetwork", +] + +# Custom error types +[workspace.dependencies.thiserror] +version = "2.0.17" + +[workspace.dependencies.thiserror-ext] +version = "0.3.0" + +# Async runtime +[workspace.dependencies.tokio] +version = "1.48.0" +features = ["full"] + +[workspace.dependencies.tokio-stream] +version = "0.1.17" + +# Tokio rustls integration +[workspace.dependencies.tokio-rustls] +version = "0.26.4" + +# Tokio test utilities +[workspace.dependencies.tokio-test] +version = "0.4.4" + +# Useful async utilities +[workspace.dependencies.tokio-util] +version = "0.7.16" +features = ["rt"] + +# Tower services +[workspace.dependencies.tower] +version = "0.5.2" +features = ["util"] + +# Tower service trait +[workspace.dependencies.tower-service] +version = "0.3.3" + +# Tower layer trait +[workspace.dependencies.tower-layer] +version = "0.3.3" + +# Tower HTTP layers +[workspace.dependencies.tower-http] +version = "0.6.6" +features = ["cors", "fs", "add-extension", "set-header"] + +# Logging and tracing +[workspace.dependencies.tracing] +version = "0.1.41" +[workspace.dependencies.tracing-subscriber] +version = "0.3.22" +features = ["env-filter"] +[workspace.dependencies.tracing-appender] +version = "0.2.4" + +# URL manipulation +[workspace.dependencies.url] +version = "2.5.7" +features = ["serde"] + +# URL encoding +[workspace.dependencies.urlencoding] +version = "2.1.3" + +# ULID support +[workspace.dependencies.ulid] +version = "=1.1.4" # Pinned to the latest version which used rand 0.8 +features = ["serde", "uuid"] + +# UUID support +[workspace.dependencies.uuid] +version = "1.18.1" + +# HTML escaping +[workspace.dependencies.v_htmlescape] +version = "0.15.8" + +# Version information generation +[workspace.dependencies.vergen-gitcl] +version = "1.0.8" +features = ["rustc"] + +# Directory traversal +[workspace.dependencies.walkdir] +version = "2.5.0" + +# HTTP mock server +[workspace.dependencies.wiremock] +version = "0.6.5" + +# User-agent parser 
+[workspace.dependencies.woothee] +version = "0.13.0" + +# String writing interface +[workspace.dependencies.writeable] +version = "0.5.5" + +# Zero memory after use +[workspace.dependencies.zeroize] +version = "1.8.2" + +# Password strength estimation +[workspace.dependencies.zxcvbn] +version = "3.1.0" + +[profile.release] +codegen-units = 1 # Reduce the number of codegen units to increase optimizations +lto = true # Enable fat LTO + +# A few profile opt-level tweaks to make the test suite run faster +[profile.dev.package] +argon2.opt-level = 3 +bcrypt.opt-level = 3 +block-buffer.opt-level = 3 +cranelift-codegen.opt-level = 3 +digest.opt-level = 3 +hmac.opt-level = 3 +generic-array.opt-level = 3 +num-bigint-dig.opt-level = 3 +pbkdf2.opt-level = 3 +rayon.opt-level = 3 +regalloc2.opt-level = 3 +sha2.opt-level = 3 +sqlx-macros.opt-level = 3 diff --git a/matrix-authentication-service/Dockerfile b/matrix-authentication-service/Dockerfile new file mode 100644 index 00000000..ba2ed16c --- /dev/null +++ b/matrix-authentication-service/Dockerfile @@ -0,0 +1,169 @@ +# syntax = docker/dockerfile:1.21.0 +# Copyright 2025, 2026 Element Creations Ltd. +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +# Builds a minimal image with the binary only. It is multi-arch capable, +# cross-building to aarch64 and x86_64. When cross-compiling, Docker sets two +# implicit BUILDARG: BUILDPLATFORM being the host platform and TARGETPLATFORM +# being the platform being built. 
+ +# The Debian version and version name must be in sync +ARG DEBIAN_VERSION=13 +ARG DEBIAN_VERSION_NAME=trixie +# Keep in sync with .github/workflows/ci.yaml +ARG RUSTC_VERSION=1.93.0 +ARG NODEJS_VERSION=24.13.0 +# Keep in sync with .github/actions/build-policies/action.yml and policies/Makefile +ARG OPA_VERSION=1.13.1 +ARG CARGO_AUDITABLE_VERSION=0.7.2 + +########################################## +## Build stage that builds the frontend ## +########################################## +FROM --platform=${BUILDPLATFORM} docker.io/library/node:${NODEJS_VERSION}-${DEBIAN_VERSION_NAME} AS frontend + +WORKDIR /app/frontend + +COPY ./frontend/.npmrc ./frontend/package.json ./frontend/package-lock.json /app/frontend/ +# Network access: to fetch dependencies +RUN --network=default \ + npm ci + +COPY ./frontend/ /app/frontend/ +COPY ./templates/ /app/templates/ +RUN --network=none \ + npm run build + +# Move the built files +RUN --network=none \ + mkdir -p /share/assets && \ + cp ./dist/manifest.json /share/manifest.json && \ + rm -f ./dist/index.html* ./dist/manifest.json* && \ + cp ./dist/* /share/assets/ + +############################################## +## Build stage that builds the OPA policies ## +############################################## +FROM --platform=${BUILDPLATFORM} docker.io/library/buildpack-deps:${DEBIAN_VERSION_NAME} AS policy + +ARG BUILDOS +ARG BUILDARCH +ARG OPA_VERSION + +# Download Open Policy Agent +ADD --chmod=755 https://github.com/open-policy-agent/opa/releases/download/v${OPA_VERSION}/opa_${BUILDOS}_${BUILDARCH}_static /usr/local/bin/opa + +WORKDIR /app/policies +COPY ./policies /app/policies +RUN --network=none \ + make -B && \ + chmod a+r ./policy.wasm + +######################################## +## Build stage that builds the binary ## +######################################## +FROM --platform=${BUILDPLATFORM} docker.io/library/rust:${RUSTC_VERSION}-${DEBIAN_VERSION_NAME} AS builder + +ARG CARGO_AUDITABLE_VERSION +ARG RUSTC_VERSION + +# 
Install pinned versions of cargo-auditable +# Network access: to fetch dependencies +RUN --network=default \ + cargo install --locked \ + cargo-auditable@=${CARGO_AUDITABLE_VERSION} + +# Install all cross-compilation targets +# Network access: to download the targets +RUN --network=default \ + rustup target add \ + --toolchain "${RUSTC_VERSION}" \ + x86_64-unknown-linux-gnu \ + aarch64-unknown-linux-gnu + +RUN --network=none \ + dpkg --add-architecture arm64 && \ + dpkg --add-architecture amd64 + +ARG BUILDPLATFORM + +# Install cross-compilation toolchains for all supported targets +# Network access: to install apt packages +RUN --network=default \ + apt-get update && apt-get install -y \ + $(if [ "${BUILDPLATFORM}" != "linux/arm64" ]; then echo "g++-aarch64-linux-gnu"; fi) \ + $(if [ "${BUILDPLATFORM}" != "linux/amd64" ]; then echo "g++-x86-64-linux-gnu"; fi) \ + libc6-dev-amd64-cross \ + libc6-dev-arm64-cross \ + g++ + +# Setup the cross-compilation environment +ENV \ + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ + CC_aarch64_unknown_linux_gnu=aarch64-linux-gnu-gcc \ + CXX_aarch64_unknown_linux_gnu=aarch64-linux-gnu-g++ \ + CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=x86_64-linux-gnu-gcc \ + CC_x86_64_unknown_linux_gnu=x86_64-linux-gnu-gcc \ + CXX_x86_64_unknown_linux_gnu=x86_64-linux-gnu-g++ + +# Set the working directory +WORKDIR /app + +# Copy the code +COPY ./ /app +ENV SQLX_OFFLINE=true + +ARG VERGEN_GIT_DESCRIBE +ENV VERGEN_GIT_DESCRIBE=${VERGEN_GIT_DESCRIBE} + +# Network access: cargo auditable needs it +RUN --network=default \ + --mount=type=cache,target=/root/.cargo/registry \ + --mount=type=cache,target=/app/target \ + cargo auditable build \ + --locked \ + --release \ + --bin mas-cli \ + --no-default-features \ + --features docker \ + --target x86_64-unknown-linux-gnu \ + --target aarch64-unknown-linux-gnu \ + && mv "target/x86_64-unknown-linux-gnu/release/mas-cli" /usr/local/bin/mas-cli-amd64 \ + && mv 
"target/aarch64-unknown-linux-gnu/release/mas-cli" /usr/local/bin/mas-cli-arm64 + +####################################### +## Prepare /usr/local/share/mas-cli/ ## +####################################### +FROM --platform=${BUILDPLATFORM} scratch AS share + +COPY --from=frontend /share /share +COPY --from=policy /app/policies/policy.wasm /share/policy.wasm +COPY ./templates/ /share/templates +COPY ./translations/ /share/translations + +################################## +## Runtime stage, debug variant ## +################################## +FROM gcr.io/distroless/cc-debian${DEBIAN_VERSION}:debug-nonroot AS debug + +ARG TARGETARCH +COPY --from=builder /usr/local/bin/mas-cli-${TARGETARCH} /usr/local/bin/mas-cli +COPY --from=share /share /usr/local/share/mas-cli + +WORKDIR / +ENTRYPOINT ["/usr/local/bin/mas-cli"] + +################### +## Runtime stage ## +################### +FROM gcr.io/distroless/cc-debian${DEBIAN_VERSION}:nonroot + +ARG TARGETARCH +COPY --from=builder /usr/local/bin/mas-cli-${TARGETARCH} /usr/local/bin/mas-cli +COPY --from=share /share /usr/local/share/mas-cli + +WORKDIR / +ENTRYPOINT ["/usr/local/bin/mas-cli"] diff --git a/matrix-authentication-service/LICENSE b/matrix-authentication-service/LICENSE new file mode 100644 index 00000000..be3f7b28 --- /dev/null +++ b/matrix-authentication-service/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. 
However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. 
+ + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. 
+ + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. 
If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. 
If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. 
+ + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. 
For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>. diff --git a/matrix-authentication-service/LICENSE-COMMERCIAL b/matrix-authentication-service/LICENSE-COMMERCIAL new file mode 100644 index 00000000..173e03e0 --- /dev/null +++ b/matrix-authentication-service/LICENSE-COMMERCIAL @@ -0,0 +1,6 @@ +Licensees holding a valid commercial license with Element may use this +software in accordance with the terms contained in a written agreement +between you and Element. + +To purchase a commercial license please contact our sales team at +licensing@element.io diff --git a/matrix-authentication-service/README.md b/matrix-authentication-service/README.md new file mode 100644 index 00000000..6acc6f6e --- /dev/null +++ b/matrix-authentication-service/README.md @@ -0,0 +1,61 @@ +# Matrix Authentication Service + +MAS (Matrix Authentication Service) is a user management and authentication service for [Matrix](https://matrix.org/) homeservers, written and maintained by [Element](https://element.io/). You can directly run and manage the source code in this repository, available under an AGPL license (or alternatively under a commercial license from Element). Support is not provided by Element unless you have a subscription. + +It has been created to support the migration of Matrix to a next-generation of auth APIs per [MSC3861](https://github.com/matrix-org/matrix-doc/pull/3861). + +See the [Documentation](https://element-hq.github.io/matrix-authentication-service/index.html) for information on installation and use. + +You can learn more about Matrix and next-generation auth at [areweoidcyet.com](https://areweoidcyet.com/). 
+ +## 🚀 Getting started + +This component is developed and maintained by [Element](https://element.io). It gets shipped as part of the **Element Server Suite (ESS)** which provides the official means of deployment. + +ESS is a Matrix distribution from Element with focus on quality and ease of use. It ships a full Matrix stack tailored to the respective use case. + +There are three editions of ESS: + +- [ESS Community](https://github.com/element-hq/ess-helm) - the free Matrix + distribution from Element tailored to small-/mid-scale, non-commercial + community use cases +- [ESS Pro](https://element.io/server-suite) - the commercial Matrix + distribution from Element for professional use +- [ESS TI-M](https://element.io/server-suite/ti-messenger) - a special version + of ESS Pro focused on the requirements of TI-Messenger Pro and ePA as + specified by the German National Digital Health Agency Gematik + +## 💬 Community room + +Developers and users of Matrix Authentication Service can chat in the [#matrix-auth:matrix.org](https://matrix.to/#/#matrix-auth:matrix.org) room on Matrix. + +## 🛠️ Standalone installation and configuration + +The best way to get a modern Element Matrix stack is through the [Element Server Suite](https://element.io/en/server-suite), which includes MAS. + +The MAS documentation describes [how to install and configure MAS](https://element-hq.github.io/matrix-authentication-service/setup/). +We recommend using the [Docker image](https://element-hq.github.io/matrix-authentication-service/setup/installation.html#using-the-docker-image) or the [pre-built binaries](https://element-hq.github.io/matrix-authentication-service/setup/installation.html#pre-built-binaries). + +## 📖 Translations + +Matrix Authentication Service is available in multiple languages. +Anyone can contribute to translations through [Localazy](https://localazy.com/element-matrix-authentication-service/). 
+ +## 🏗️ Contributing + +See the [contribution guidelines](https://element-hq.github.io/matrix-authentication-service/development/contributing.html) for information on how to contribute to this project. + +## ⚖️ Copyright & License + +Copyright 2021-2024 The Matrix.org Foundation C.I.C. + +Copyright 2024, 2025 New Vector Ltd. + +Copyright 2025, 2026 Element Creations Ltd. + +This software is dual-licensed by Element Creations Ltd (Element). It can be used either: + +(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR + +(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to). +Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses. 
diff --git a/matrix-authentication-service/biome.json b/matrix-authentication-service/biome.json new file mode 100644 index 00000000..cc545416 --- /dev/null +++ b/matrix-authentication-service/biome.json @@ -0,0 +1,60 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "assist": { "actions": { "source": { "organizeImports": "on" } } }, + "vcs": { + "enabled": true, + "clientKind": "git", + "useIgnoreFile": true + }, + "files": { + "includes": [ + "**", + "!**/.devcontainer/**", + "!**/docs/**", + "!**/translations/**", + "!**/policies/**", + "!**/crates/**", + "!**/frontend/package.json", + "!**/frontend/src/gql/**", + "!**/frontend/src/routeTree.gen.ts", + "!**/frontend/.storybook/locales.ts", + "!**/frontend/.storybook/public/mockServiceWorker.js", + "!**/frontend/locales/**/*.json", + "!**/coverage/**", + "!**/dist/**" + ] + }, + "formatter": { + "enabled": true, + "useEditorconfig": true + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true, + "complexity": { + "noImportantStyles": "off" + }, + "suspicious": { + "noUnknownAtRules": "off" + }, + "correctness": { + "noUnusedImports": "warn", + "noUnusedVariables": "warn" + }, + "style": { + "noParameterAssign": "error", + "useAsConstAssertion": "error", + "useDefaultParameterLast": "error", + "useEnumInitializers": "error", + "useSelfClosingElements": "error", + "useSingleVarDeclarator": "error", + "noUnusedTemplateLiteral": "error", + "useNumberNamespace": "error", + "noInferrableTypes": "error", + "noUselessElse": "error", + "noDescendingSpecificity": "off" + } + } + } +} diff --git a/matrix-authentication-service/book.toml b/matrix-authentication-service/book.toml new file mode 100644 index 00000000..fa1f0cfe --- /dev/null +++ b/matrix-authentication-service/book.toml @@ -0,0 +1,26 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +# Documentation for possible options in this file is at +# https://rust-lang.github.io/mdBook/format/config.html +[book] +title = "Matrix Authentication Service" +authors = ["Element Backend Team"] +language = "en" + +src = "docs" + +[build] +build-dir = "target/book" + +[output.html] +# The URL visitors will be directed to when they try to edit a page +edit-url-template = "https://github.com/element-hq/matrix-authentication-service/edit/main/{path}" + +# The source code URL of the repository +git-repository-url = "https://github.com/element-hq/matrix-authentication-service" + +# The path that the docs are hosted on +site-url = "/matrix-authentication-service/" diff --git a/matrix-authentication-service/clippy.toml b/matrix-authentication-service/clippy.toml new file mode 100644 index 00000000..db1ba69d --- /dev/null +++ b/matrix-authentication-service/clippy.toml @@ -0,0 +1,22 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +doc-valid-idents = ["OpenID", "OAuth", "UserInfo", "..", "PostgreSQL", "SQLite"] + +disallowed-methods = [ + { path = "rand::thread_rng", reason = "do not create rngs on the fly, pass them as parameters" }, + { path = "chrono::Utc::now", reason = "source the current time from the clock instead" }, + { path = "ulid::Ulid::from_datetime", reason = "use Ulid::from_datetime_with_source instead" }, + { path = "ulid::Ulid::new", reason = "use Ulid::from_datetime_with_source instead" }, + { path = "reqwest::Client::new", reason = "use mas_http::reqwest_client instead" }, + { path = "reqwest::RequestBuilder::send", reason = "use send_traced instead" }, +] + +disallowed-types = [ + { path = "std::path::PathBuf", reason = "use camino::Utf8PathBuf instead" }, + { path = "std::path::Path", reason = "use camino::Utf8Path instead" }, + { path = "axum::extract::Query", reason = "use axum_extra::extract::Query instead. 
The built-in version doesn't deserialise lists."}, + { path = "axum::extract::rejection::QueryRejection", reason = "use axum_extra::extract::QueryRejection instead"} +] diff --git a/matrix-authentication-service/crates/axum-utils/Cargo.toml b/matrix-authentication-service/crates/axum-utils/Cargo.toml new file mode 100644 index 00000000..ceb47a4c --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/Cargo.toml @@ -0,0 +1,48 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-axum-utils" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +axum.workspace = true +axum-extra.workspace = true +base64ct.workspace = true +chrono.workspace = true +headers.workspace = true +http.workspace = true +icu_locid.workspace = true +mime.workspace = true +rand.workspace = true +reqwest.workspace = true +sentry.workspace = true +serde.workspace = true +serde_with.workspace = true +serde_json.workspace = true +thiserror.workspace = true +tokio.workspace = true +tracing.workspace = true +url.workspace = true +ulid.workspace = true + +oauth2-types.workspace = true +mas-data-model.workspace = true +mas-http.workspace = true +mas-iana.workspace = true +mas-jose.workspace = true +mas-keystore.workspace = true +mas-storage.workspace = true +mas-templates.workspace = true diff --git a/matrix-authentication-service/crates/axum-utils/src/client_authorization.rs b/matrix-authentication-service/crates/axum-utils/src/client_authorization.rs new file mode 100644 index 00000000..65d88585 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/client_authorization.rs @@ -0,0 +1,739 @@ +// Copyright 2024, 2025 New 
Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use axum::{ + BoxError, Json, + extract::{ + Form, FromRequest, + rejection::{FailedToDeserializeForm, FormRejection}, + }, + response::IntoResponse, +}; +use headers::authorization::{Basic, Bearer, Credentials as _}; +use http::{Request, StatusCode}; +use mas_data_model::{Client, JwksOrJwksUri}; +use mas_http::RequestBuilderExt; +use mas_iana::oauth::OAuthClientAuthenticationMethod; +use mas_jose::{jwk::PublicJsonWebKeySet, jwt::Jwt}; +use mas_keystore::Encrypter; +use mas_storage::{RepositoryAccess, oauth2::OAuth2ClientRepository}; +use oauth2_types::errors::{ClientError, ClientErrorCode}; +use serde::{Deserialize, de::DeserializeOwned}; +use serde_json::Value; +use thiserror::Error; + +use crate::record_error; + +static JWT_BEARER_CLIENT_ASSERTION: &str = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"; + +#[derive(Deserialize)] +struct AuthorizedForm { + client_id: Option, + client_secret: Option, + client_assertion_type: Option, + client_assertion: Option, + + #[serde(flatten)] + inner: F, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum Credentials { + None { + client_id: String, + }, + ClientSecretBasic { + client_id: String, + client_secret: String, + }, + ClientSecretPost { + client_id: String, + client_secret: String, + }, + ClientAssertionJwtBearer { + client_id: String, + jwt: Box>>, + }, + BearerToken { + token: String, + }, +} + +impl Credentials { + /// Get the `client_id` of the credentials + #[must_use] + pub fn client_id(&self) -> Option<&str> { + match self { + Credentials::None { client_id } + | Credentials::ClientSecretBasic { client_id, .. } + | Credentials::ClientSecretPost { client_id, .. } + | Credentials::ClientAssertionJwtBearer { client_id, .. 
} => Some(client_id), + Credentials::BearerToken { .. } => None, + } + } + + /// Get the bearer token from the credentials. + #[must_use] + pub fn bearer_token(&self) -> Option<&str> { + match self { + Credentials::BearerToken { token } => Some(token), + _ => None, + } + } + + /// Fetch the client from the database + /// + /// # Errors + /// + /// Returns an error if the client could not be found or if the underlying + /// repository errored. + pub async fn fetch( + &self, + repo: &mut impl RepositoryAccess, + ) -> Result, E> { + let client_id = match self { + Credentials::None { client_id } + | Credentials::ClientSecretBasic { client_id, .. } + | Credentials::ClientSecretPost { client_id, .. } + | Credentials::ClientAssertionJwtBearer { client_id, .. } => client_id, + Credentials::BearerToken { .. } => return Ok(None), + }; + + repo.oauth2_client().find_by_client_id(client_id).await + } + + /// Verify credentials presented by the client for authentication + /// + /// # Errors + /// + /// Returns an error if the credentials are invalid. + #[tracing::instrument(skip_all)] + pub async fn verify( + &self, + http_client: &reqwest::Client, + encrypter: &Encrypter, + method: &OAuthClientAuthenticationMethod, + client: &Client, + ) -> Result<(), CredentialsVerificationError> { + match (self, method) { + (Credentials::None { .. }, OAuthClientAuthenticationMethod::None) => {} + + ( + Credentials::ClientSecretPost { client_secret, .. }, + OAuthClientAuthenticationMethod::ClientSecretPost, + ) + | ( + Credentials::ClientSecretBasic { client_secret, .. 
}, + OAuthClientAuthenticationMethod::ClientSecretBasic, + ) => { + // Decrypt the client_secret + let encrypted_client_secret = client + .encrypted_client_secret + .as_ref() + .ok_or(CredentialsVerificationError::InvalidClientConfig)?; + + let decrypted_client_secret = encrypter + .decrypt_string(encrypted_client_secret) + .map_err(|_e| CredentialsVerificationError::DecryptionError)?; + + // Check if the client_secret matches + if client_secret.as_bytes() != decrypted_client_secret { + return Err(CredentialsVerificationError::ClientSecretMismatch); + } + } + + ( + Credentials::ClientAssertionJwtBearer { jwt, .. }, + OAuthClientAuthenticationMethod::PrivateKeyJwt, + ) => { + // Get the client JWKS + let jwks = client + .jwks + .as_ref() + .ok_or(CredentialsVerificationError::InvalidClientConfig)?; + + let jwks = fetch_jwks(http_client, jwks) + .await + .map_err(CredentialsVerificationError::JwksFetchFailed)?; + + jwt.verify_with_jwks(&jwks) + .map_err(|_| CredentialsVerificationError::InvalidAssertionSignature)?; + } + + ( + Credentials::ClientAssertionJwtBearer { jwt, .. 
}, + OAuthClientAuthenticationMethod::ClientSecretJwt, + ) => { + // Decrypt the client_secret + let encrypted_client_secret = client + .encrypted_client_secret + .as_ref() + .ok_or(CredentialsVerificationError::InvalidClientConfig)?; + + let decrypted_client_secret = encrypter + .decrypt_string(encrypted_client_secret) + .map_err(|_e| CredentialsVerificationError::DecryptionError)?; + + jwt.verify_with_shared_secret(decrypted_client_secret) + .map_err(|_| CredentialsVerificationError::InvalidAssertionSignature)?; + } + + (_, _) => { + return Err(CredentialsVerificationError::AuthenticationMethodMismatch); + } + } + Ok(()) + } +} + +async fn fetch_jwks( + http_client: &reqwest::Client, + jwks: &JwksOrJwksUri, +) -> Result { + let uri = match jwks { + JwksOrJwksUri::Jwks(j) => return Ok(j.clone()), + JwksOrJwksUri::JwksUri(u) => u, + }; + + let response = http_client + .get(uri.as_str()) + .send_traced() + .await? + .error_for_status()? + .json() + .await?; + + Ok(response) +} + +#[derive(Debug, Error)] +pub enum CredentialsVerificationError { + #[error("failed to decrypt client credentials")] + DecryptionError, + + #[error("invalid client configuration")] + InvalidClientConfig, + + #[error("client secret did not match")] + ClientSecretMismatch, + + #[error("authentication method mismatch")] + AuthenticationMethodMismatch, + + #[error("invalid assertion signature")] + InvalidAssertionSignature, + + #[error("failed to fetch jwks")] + JwksFetchFailed(#[source] Box), +} + +impl CredentialsVerificationError { + /// Returns true if the error is an internal error, not caused by the client + #[must_use] + pub fn is_internal(&self) -> bool { + matches!( + self, + Self::DecryptionError | Self::InvalidClientConfig | Self::JwksFetchFailed(_) + ) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ClientAuthorization { + pub credentials: Credentials, + pub form: Option, +} + +impl ClientAuthorization { + /// Get the `client_id` from the credentials. 
+ #[must_use] + pub fn client_id(&self) -> Option<&str> { + self.credentials.client_id() + } +} + +#[derive(Debug, Error)] +pub enum ClientAuthorizationError { + #[error("Invalid Authorization header")] + InvalidHeader, + + #[error("Could not deserialize request body")] + BadForm(#[source] FailedToDeserializeForm), + + #[error("client_id in form ({form:?}) does not match credential ({credential:?})")] + ClientIdMismatch { credential: String, form: String }, + + #[error("Unsupported client_assertion_type: {client_assertion_type}")] + UnsupportedClientAssertion { client_assertion_type: String }, + + #[error("No credentials were presented")] + MissingCredentials, + + #[error("Invalid request")] + InvalidRequest, + + #[error("Invalid client_assertion")] + InvalidAssertion, + + #[error(transparent)] + Internal(Box), +} + +impl IntoResponse for ClientAuthorizationError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + match &self { + ClientAuthorizationError::InvalidHeader => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json(ClientError::new( + ClientErrorCode::InvalidRequest, + "Invalid Authorization header", + )), + ), + + ClientAuthorizationError::BadForm(err) => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json( + ClientError::from(ClientErrorCode::InvalidRequest) + .with_description(format!("{err}")), + ), + ), + + ClientAuthorizationError::ClientIdMismatch { .. } => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json( + ClientError::from(ClientErrorCode::InvalidGrant) + .with_description(format!("{self}")), + ), + ), + + ClientAuthorizationError::UnsupportedClientAssertion { .. 
} => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json( + ClientError::from(ClientErrorCode::InvalidRequest) + .with_description(format!("{self}")), + ), + ), + + ClientAuthorizationError::MissingCredentials => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json(ClientError::new( + ClientErrorCode::InvalidRequest, + "No credentials were presented", + )), + ), + + ClientAuthorizationError::InvalidRequest => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json(ClientError::from(ClientErrorCode::InvalidRequest)), + ), + + ClientAuthorizationError::InvalidAssertion => ( + StatusCode::BAD_REQUEST, + sentry_event_id, + Json(ClientError::new( + ClientErrorCode::InvalidRequest, + "Invalid client_assertion", + )), + ), + + ClientAuthorizationError::Internal(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + sentry_event_id, + Json( + ClientError::from(ClientErrorCode::ServerError) + .with_description(format!("{e}")), + ), + ), + } + .into_response() + } +} + +impl FromRequest for ClientAuthorization +where + F: DeserializeOwned, + S: Send + Sync, +{ + type Rejection = ClientAuthorizationError; + + async fn from_request( + req: Request, + state: &S, + ) -> Result { + enum Authorization { + Basic(String, String), + Bearer(String), + } + + // Sadly, the typed-header 'Authorization' doesn't let us check for both + // Basic and Bearer at the same time, so we need to parse them manually + let authorization = if let Some(header) = req.headers().get(http::header::AUTHORIZATION) { + let bytes = header.as_bytes(); + if bytes.len() >= 6 && bytes[..6].eq_ignore_ascii_case(b"Basic ") { + let Some(decoded) = Basic::decode(header) else { + return Err(ClientAuthorizationError::InvalidHeader); + }; + + Some(Authorization::Basic( + decoded.username().to_owned(), + decoded.password().to_owned(), + )) + } else if bytes.len() >= 7 && bytes[..7].eq_ignore_ascii_case(b"Bearer ") { + let Some(decoded) = Bearer::decode(header) else { + return Err(ClientAuthorizationError::InvalidHeader); + }; 
+ + Some(Authorization::Bearer(decoded.token().to_owned())) + } else { + return Err(ClientAuthorizationError::InvalidHeader); + } + } else { + None + }; + + // Take the form value + let ( + client_id_from_form, + client_secret_from_form, + client_assertion_type, + client_assertion, + form, + ) = match Form::>::from_request(req, state).await { + Ok(Form(form)) => ( + form.client_id, + form.client_secret, + form.client_assertion_type, + form.client_assertion, + Some(form.inner), + ), + // If it is not a form, continue + Err(FormRejection::InvalidFormContentType(_err)) => (None, None, None, None, None), + // If the form could not be read, return a Bad Request error + Err(FormRejection::FailedToDeserializeForm(err)) => { + return Err(ClientAuthorizationError::BadForm(err)); + } + // Other errors (body read twice, byte stream broke) return an internal error + Err(e) => return Err(ClientAuthorizationError::Internal(Box::new(e))), + }; + + // And now, figure out the actual auth method + let credentials = match ( + authorization, + client_id_from_form, + client_secret_from_form, + client_assertion_type, + client_assertion, + ) { + ( + Some(Authorization::Basic(client_id, client_secret)), + client_id_from_form, + None, + None, + None, + ) => { + if let Some(client_id_from_form) = client_id_from_form { + // If the client_id was in the body, verify it matches with the header + if client_id != client_id_from_form { + return Err(ClientAuthorizationError::ClientIdMismatch { + credential: client_id, + form: client_id_from_form, + }); + } + } + + Credentials::ClientSecretBasic { + client_id, + client_secret, + } + } + + (None, Some(client_id), Some(client_secret), None, None) => { + // Got both client_id and client_secret from the form + Credentials::ClientSecretPost { + client_id, + client_secret, + } + } + + (None, Some(client_id), None, None, None) => { + // Only got a client_id in the form + Credentials::None { client_id } + } + + ( + None, + client_id_from_form, + None, + 
Some(client_assertion_type), + Some(client_assertion), + ) if client_assertion_type == JWT_BEARER_CLIENT_ASSERTION => { + // Got a JWT bearer client_assertion + let jwt: Jwt<'static, HashMap> = Jwt::try_from(client_assertion) + .map_err(|_| ClientAuthorizationError::InvalidAssertion)?; + + let client_id = if let Some(Value::String(client_id)) = jwt.payload().get("sub") { + client_id.clone() + } else { + return Err(ClientAuthorizationError::InvalidAssertion); + }; + + if let Some(client_id_from_form) = client_id_from_form { + // If the client_id was in the body, verify it matches the one in the JWT + if client_id != client_id_from_form { + return Err(ClientAuthorizationError::ClientIdMismatch { + credential: client_id, + form: client_id_from_form, + }); + } + } + + Credentials::ClientAssertionJwtBearer { + client_id, + jwt: Box::new(jwt), + } + } + + (None, None, None, Some(client_assertion_type), Some(_client_assertion)) => { + // Got another unsupported client_assertion + return Err(ClientAuthorizationError::UnsupportedClientAssertion { + client_assertion_type, + }); + } + + (Some(Authorization::Bearer(token)), None, None, None, None) => { + // Got a bearer token + Credentials::BearerToken { token } + } + + (None, None, None, None, None) => { + // Special case when there are no credentials anywhere + return Err(ClientAuthorizationError::MissingCredentials); + } + + _ => { + // Every other combination is an invalid request + return Err(ClientAuthorizationError::InvalidRequest); + } + }; + + Ok(ClientAuthorization { credentials, form }) + } +} + +#[cfg(test)] +mod tests { + use axum::body::Body; + use http::{Method, Request}; + + use super::*; + + #[tokio::test] + async fn none_test() { + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .body(Body::new("client_id=client-id&foo=bar".to_owned())) + .unwrap(); + + assert_eq!( + ClientAuthorization::::from_request(req, 
&()) + .await + .unwrap(), + ClientAuthorization { + credentials: Credentials::None { + client_id: "client-id".to_owned(), + }, + form: Some(serde_json::json!({"foo": "bar"})), + } + ); + } + + #[tokio::test] + async fn client_secret_basic_test() { + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .header( + http::header::AUTHORIZATION, + "Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=", + ) + .body(Body::new("foo=bar".to_owned())) + .unwrap(); + + assert_eq!( + ClientAuthorization::::from_request(req, &()) + .await + .unwrap(), + ClientAuthorization { + credentials: Credentials::ClientSecretBasic { + client_id: "client-id".to_owned(), + client_secret: "client-secret".to_owned(), + }, + form: Some(serde_json::json!({"foo": "bar"})), + } + ); + + // client_id in both header and body + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .header( + http::header::AUTHORIZATION, + "Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=", + ) + .body(Body::new("client_id=client-id&foo=bar".to_owned())) + .unwrap(); + + assert_eq!( + ClientAuthorization::::from_request(req, &()) + .await + .unwrap(), + ClientAuthorization { + credentials: Credentials::ClientSecretBasic { + client_id: "client-id".to_owned(), + client_secret: "client-secret".to_owned(), + }, + form: Some(serde_json::json!({"foo": "bar"})), + } + ); + + // client_id in both header and body mismatch + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .header( + http::header::AUTHORIZATION, + "Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=", + ) + .body(Body::new("client_id=mismatch-id&foo=bar".to_owned())) + .unwrap(); + + assert!(matches!( + ClientAuthorization::::from_request(req, &()).await, + Err(ClientAuthorizationError::ClientIdMismatch { 
.. }), + )); + + // Invalid header + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .header(http::header::AUTHORIZATION, "Basic invalid") + .body(Body::new("foo=bar".to_owned())) + .unwrap(); + + assert!(matches!( + ClientAuthorization::::from_request(req, &()).await, + Err(ClientAuthorizationError::InvalidHeader), + )); + } + + #[tokio::test] + async fn client_secret_post_test() { + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .body(Body::new( + "client_id=client-id&client_secret=client-secret&foo=bar".to_owned(), + )) + .unwrap(); + + assert_eq!( + ClientAuthorization::::from_request(req, &()) + .await + .unwrap(), + ClientAuthorization { + credentials: Credentials::ClientSecretPost { + client_id: "client-id".to_owned(), + client_secret: "client-secret".to_owned(), + }, + form: Some(serde_json::json!({"foo": "bar"})), + } + ); + } + + #[tokio::test] + async fn client_assertion_test() { + // Signed with client_secret = "client-secret" + let jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJjbGllbnQtaWQiLCJzdWIiOiJjbGllbnQtaWQiLCJhdWQiOiJodHRwczovL2V4YW1wbGUuY29tL29hdXRoMi9pbnRyb3NwZWN0IiwianRpIjoiYWFiYmNjIiwiZXhwIjoxNTE2MjM5MzIyLCJpYXQiOjE1MTYyMzkwMjJ9.XTaACG_Rww0GPecSZvkbem-AczNy9LLNBueCLCiQajU"; + let body = Body::new(format!( + "client_assertion_type={JWT_BEARER_CLIENT_ASSERTION}&client_assertion={jwt}&foo=bar", + )); + + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .body(body) + .unwrap(); + + let authz = ClientAuthorization::::from_request(req, &()) + .await + .unwrap(); + assert_eq!(authz.form, Some(serde_json::json!({"foo": "bar"}))); + + let Credentials::ClientAssertionJwtBearer { client_id, jwt } = authz.credentials else { + panic!("expected a 
JWT client_assertion"); + }; + + assert_eq!(client_id, "client-id"); + jwt.verify_with_shared_secret(b"client-secret".to_vec()) + .unwrap(); + } + + #[tokio::test] + async fn bearer_token_test() { + let req = Request::builder() + .method(Method::POST) + .header( + http::header::CONTENT_TYPE, + mime::APPLICATION_WWW_FORM_URLENCODED.as_ref(), + ) + .header(http::header::AUTHORIZATION, "Bearer token") + .body(Body::new("foo=bar".to_owned())) + .unwrap(); + + assert_eq!( + ClientAuthorization::::from_request(req, &()) + .await + .unwrap(), + ClientAuthorization { + credentials: Credentials::BearerToken { + token: "token".to_owned(), + }, + form: Some(serde_json::json!({"foo": "bar"})), + } + ); + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/cookies.rs b/matrix-authentication-service/crates/axum-utils/src/cookies.rs new file mode 100644 index 00000000..97f1db83 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/cookies.rs @@ -0,0 +1,169 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Private (encrypted) cookie jar, based on axum-extra's cookie jar + +use std::convert::Infallible; + +use axum::{ + extract::{FromRef, FromRequestParts}, + response::{IntoResponseParts, ResponseParts}, +}; +use axum_extra::extract::cookie::{Cookie, Key, PrivateCookieJar, SameSite}; +use http::request::Parts; +use serde::{Serialize, de::DeserializeOwned}; +use thiserror::Error; +use url::Url; + +#[derive(Debug, Error)] +#[error("could not decode cookie")] +pub enum CookieDecodeError { + Deserialize(#[from] serde_json::Error), +} + +/// Manages cookie options and encryption key +/// +/// This is meant to be accessible through axum's state via the [`FromRef`] +/// trait +#[derive(Clone)] +pub struct CookieManager { + options: CookieOption, + key: Key, +} + +impl CookieManager { + #[must_use] + pub const fn new(base_url: Url, key: Key) -> Self { + let options = CookieOption::new(base_url); + Self { options, key } + } + + #[must_use] + pub fn derive_from(base_url: Url, key: &[u8]) -> Self { + let key = Key::derive_from(key); + Self::new(base_url, key) + } + + #[must_use] + pub fn cookie_jar(&self) -> CookieJar { + let inner = PrivateCookieJar::new(self.key.clone()); + let options = self.options.clone(); + + CookieJar { inner, options } + } + + #[must_use] + pub fn cookie_jar_from_headers(&self, headers: &http::HeaderMap) -> CookieJar { + let inner = PrivateCookieJar::from_headers(headers, self.key.clone()); + let options = self.options.clone(); + + CookieJar { inner, options } + } +} + +impl FromRequestParts for CookieJar +where + CookieManager: FromRef, + S: Send + Sync, +{ + type Rejection = Infallible; + + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let cookie_manager = CookieManager::from_ref(state); + Ok(cookie_manager.cookie_jar_from_headers(&parts.headers)) + } +} + +#[derive(Debug, Clone)] +struct CookieOption { + base_url: Url, +} + +impl CookieOption { + const fn new(base_url: Url) -> Self { + Self { base_url } + } + + fn 
secure(&self) -> bool { + self.base_url.scheme() == "https" + } + + fn path(&self) -> &str { + self.base_url.path() + } + + fn apply<'a>(&self, mut cookie: Cookie<'a>) -> Cookie<'a> { + cookie.set_http_only(true); + cookie.set_secure(self.secure()); + cookie.set_path(self.path().to_owned()); + cookie.set_same_site(SameSite::Lax); + cookie + } +} + +/// A cookie jar which encrypts cookies & sets secure options +pub struct CookieJar { + inner: PrivateCookieJar, + options: CookieOption, +} + +impl CookieJar { + /// Save the given payload in a cookie + /// + /// If `permanent` is true, the cookie will be valid for 10 years + /// + /// # Panics + /// + /// Panics if the payload cannot be serialized + #[must_use] + pub fn save(mut self, key: &str, payload: &T, permanent: bool) -> Self { + let serialized = + serde_json::to_string(payload).expect("failed to serialize cookie payload"); + + let cookie = Cookie::new(key.to_owned(), serialized); + let mut cookie = self.options.apply(cookie); + + if permanent { + // XXX: this should use a clock + cookie.make_permanent(); + } + + self.inner = self.inner.add(cookie); + + self + } + + /// Remove a cookie from the jar + #[must_use] + pub fn remove(mut self, key: &str) -> Self { + self.inner = self.inner.remove(key.to_owned()); + self + } + + /// Load and deserialize a cookie from the jar + /// + /// Returns `None` if the cookie is not present + /// + /// # Errors + /// + /// Returns an error if the cookie cannot be deserialized + pub fn load(&self, key: &str) -> Result, CookieDecodeError> { + let Some(cookie) = self.inner.get(key) else { + return Ok(None); + }; + + let decoded = serde_json::from_str(cookie.value())?; + Ok(Some(decoded)) + } +} + +impl IntoResponseParts for CookieJar { + type Error = Infallible; + + fn into_response_parts(self, res: ResponseParts) -> Result { + self.inner.into_response_parts(res) + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/csrf.rs 
b/matrix-authentication-service/crates/axum-utils/src/csrf.rs new file mode 100644 index 00000000..a4ef49b7 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/csrf.rs @@ -0,0 +1,165 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use base64ct::{Base64UrlUnpadded, Encoding}; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::Clock; +use rand::{Rng, RngCore, distributions::Standard, prelude::Distribution as _}; +use serde::{Deserialize, Serialize}; +use serde_with::{TimestampSeconds, serde_as}; +use thiserror::Error; + +use crate::cookies::{CookieDecodeError, CookieJar}; + +/// Failed to validate CSRF token +#[derive(Debug, Error)] +pub enum CsrfError { + /// The token in the form did not match the token in the cookie + #[error("CSRF token mismatch")] + Mismatch, + + /// The token in the form did not match the token in the cookie + #[error("Missing CSRF cookie")] + Missing, + + /// Failed to decode the token + #[error("could not decode CSRF cookie")] + DecodeCookie(#[from] CookieDecodeError), + + /// The token expired + #[error("CSRF token expired")] + Expired, + + /// Failed to decode the token + #[error("could not decode CSRF token")] + Decode(#[from] base64ct::Error), +} + +/// A CSRF token +#[serde_as] +#[derive(Serialize, Deserialize, Debug)] +pub struct CsrfToken { + #[serde_as(as = "TimestampSeconds")] + expiration: DateTime, + token: [u8; 32], +} + +impl CsrfToken { + /// Create a new token from a defined value valid for a specified duration + fn new(token: [u8; 32], now: DateTime, ttl: Duration) -> Self { + let expiration = now + ttl; + Self { expiration, token } + } + + /// Generate a new random token valid for a specified duration + fn generate(now: DateTime, mut rng: impl Rng, ttl: Duration) -> Self { + let token = 
Standard.sample(&mut rng); + Self::new(token, now, ttl) + } + + /// Generate a new token with the same value but an up to date expiration + fn refresh(self, now: DateTime, ttl: Duration) -> Self { + Self::new(self.token, now, ttl) + } + + /// Get the value to include in HTML forms + #[must_use] + pub fn form_value(&self) -> String { + Base64UrlUnpadded::encode_string(&self.token[..]) + } + + /// Verifies that the value got from an HTML form matches this token + /// + /// # Errors + /// + /// Returns an error if the value in the form does not match this token + pub fn verify_form_value(&self, form_value: &str) -> Result<(), CsrfError> { + let form_value = Base64UrlUnpadded::decode_vec(form_value)?; + if self.token[..] == form_value { + Ok(()) + } else { + Err(CsrfError::Mismatch) + } + } + + fn verify_expiration(self, now: DateTime) -> Result { + if now < self.expiration { + Ok(self) + } else { + Err(CsrfError::Expired) + } + } +} + +// A CSRF-protected form +#[derive(Deserialize)] +pub struct ProtectedForm { + csrf: String, + + #[serde(flatten)] + inner: T, +} + +pub trait CsrfExt { + /// Get the current CSRF token out of the cookie jar, generating a new one + /// if necessary + fn csrf_token(self, clock: &C, rng: R) -> (CsrfToken, Self) + where + R: RngCore, + C: Clock; + + /// Verify that the given CSRF-protected form is valid, returning the inner + /// value + /// + /// # Errors + /// + /// Returns an error if the CSRF cookie is missing or if the value in the + /// form is invalid + fn verify_form(&self, clock: &C, form: ProtectedForm) -> Result + where + C: Clock; +} + +impl CsrfExt for CookieJar { + fn csrf_token(self, clock: &C, rng: R) -> (CsrfToken, Self) + where + R: RngCore, + C: Clock, + { + let now = clock.now(); + let maybe_token = match self.load::("csrf") { + Ok(Some(token)) => { + let token = token.verify_expiration(now); + + // If the token is expired, just ignore it + token.ok() + } + Ok(None) => None, + Err(e) => { + tracing::warn!("Failed to 
decode CSRF cookie: {}", e); + None + } + }; + + let token = maybe_token.map_or_else( + || CsrfToken::generate(now, rng, Duration::try_hours(1).unwrap()), + |token| token.refresh(now, Duration::try_hours(1).unwrap()), + ); + + let jar = self.save("csrf", &token, false); + (token, jar) + } + + fn verify_form(&self, clock: &C, form: ProtectedForm) -> Result + where + C: Clock, + { + let token: CsrfToken = self.load("csrf")?.ok_or(CsrfError::Missing)?; + let token = token.verify_expiration(clock.now())?; + token.verify_form_value(&form.csrf)?; + Ok(form.inner) + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/error_wrapper.rs b/matrix-authentication-service/crates/axum-utils/src/error_wrapper.rs new file mode 100644 index 00000000..0865768d --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/error_wrapper.rs @@ -0,0 +1,23 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::response::{IntoResponse, Response}; + +use crate::InternalError; + +/// A simple wrapper around an error that implements [`IntoResponse`]. +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct ErrorWrapper(#[from] pub T); + +impl IntoResponse for ErrorWrapper +where + T: std::error::Error + 'static, +{ + fn into_response(self) -> Response { + InternalError::from(self.0).into_response() + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/fancy_error.rs b/matrix-authentication-service/crates/axum-utils/src/fancy_error.rs new file mode 100644 index 00000000..cb6d4e5e --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/fancy_error.rs @@ -0,0 +1,105 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + Extension, + http::StatusCode, + response::{IntoResponse, Response}, +}; +use axum_extra::typed_header::TypedHeader; +use headers::ContentType; +use mas_templates::ErrorContext; + +use crate::sentry::SentryEventID; + +fn build_context(mut err: &dyn std::error::Error) -> ErrorContext { + let description = err.to_string(); + let mut details = Vec::new(); + while let Some(source) = err.source() { + err = source; + details.push(err.to_string()); + } + + ErrorContext::new() + .with_description(description) + .with_details(details.join("\n")) +} + +pub struct GenericError { + error: Box, + code: StatusCode, +} + +impl IntoResponse for GenericError { + fn into_response(self) -> Response { + tracing::warn!(message = &*self.error); + let context = build_context(&*self.error); + let context_text = format!("{context}"); + + ( + self.code, + TypedHeader(ContentType::text()), + Extension(context), + context_text, + ) + .into_response() + } +} + +impl GenericError { + pub fn new(code: StatusCode, err: impl std::error::Error + 'static) -> Self { + Self { + error: Box::new(err), + code, + } + } +} + +pub struct InternalError { + error: Box, +} + +impl IntoResponse for InternalError { + fn into_response(self) -> Response { + tracing::error!(message = &*self.error); + let event_id = SentryEventID::for_last_event(); + let context = build_context(&*self.error); + let context_text = format!("{context}"); + + ( + StatusCode::INTERNAL_SERVER_ERROR, + TypedHeader(ContentType::text()), + event_id, + Extension(context), + context_text, + ) + .into_response() + } +} + +impl From for InternalError { + fn from(err: E) -> Self { + Self { + error: Box::new(err), + } + } +} + +impl InternalError { + /// Create a new error from a boxed error + #[must_use] + pub fn new(error: Box) -> Self { + Self { error } + } + + /// Create a new 
error from an [`anyhow::Error`] + #[must_use] + pub fn from_anyhow(err: anyhow::Error) -> Self { + Self { + error: err.into_boxed_dyn_error(), + } + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/jwt.rs b/matrix-authentication-service/crates/axum-utils/src/jwt.rs new file mode 100644 index 00000000..f7747828 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/jwt.rs @@ -0,0 +1,21 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::response::{IntoResponse, Response}; +use axum_extra::typed_header::TypedHeader; +use headers::ContentType; +use mas_jose::jwt::Jwt; +use mime::Mime; + +pub struct JwtResponse(pub Jwt<'static, T>); + +impl IntoResponse for JwtResponse { + fn into_response(self) -> Response { + let application_jwt: Mime = "application/jwt".parse().unwrap(); + let content_type = ContentType::from(application_jwt); + (TypedHeader(content_type), self.0.into_string()).into_response() + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/language_detection.rs b/matrix-authentication-service/crates/axum-utils/src/language_detection.rs new file mode 100644 index 00000000..057453a0 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/language_detection.rs @@ -0,0 +1,280 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::cmp::Reverse; + +use headers::{Error, Header}; +use http::{HeaderName, HeaderValue, header::ACCEPT_LANGUAGE}; +use icu_locid::Locale; + +#[derive(PartialEq, Eq, Debug)] +struct AcceptLanguagePart { + // None means * + locale: Option, + + // Quality is between 0 and 1 with 3 decimal places + // Which we map from 0 to 1000, e.g. 0.5 becomes 500 + quality: u16, +} + +impl PartialOrd for AcceptLanguagePart { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for AcceptLanguagePart { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + // When comparing two AcceptLanguage structs, we only consider the + // quality, in reverse. + Reverse(self.quality).cmp(&Reverse(other.quality)) + } +} + +/// A header that represents the `Accept-Language` header. +#[derive(PartialEq, Eq, Debug)] +pub struct AcceptLanguage { + parts: Vec, +} + +impl AcceptLanguage { + pub fn iter(&self) -> impl Iterator { + // This should stop when we hit the first None, aka the first * + self.parts.iter().map_while(|item| item.locale.as_ref()) + } +} + +/// Utility to trim ASCII whitespace from the start and end of a byte slice +const fn trim_bytes(mut bytes: &[u8]) -> &[u8] { + // Trim leading and trailing whitespace + while let [first, rest @ ..] 
= bytes { + if first.is_ascii_whitespace() { + bytes = rest; + } else { + break; + } + } + + while let [rest @ .., last] = bytes { + if last.is_ascii_whitespace() { + bytes = rest; + } else { + break; + } + } + + bytes +} + +impl Header for AcceptLanguage { + fn name() -> &'static HeaderName { + &ACCEPT_LANGUAGE + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + let mut parts = Vec::new(); + for value in values { + for part in value.as_bytes().split(|b| *b == b',') { + let mut it = part.split(|b| *b == b';'); + let locale = it.next().ok_or(Error::invalid())?; + let locale = trim_bytes(locale); + + let locale = match locale { + b"*" => None, + locale => { + let locale = + Locale::try_from_bytes(locale).map_err(|_e| Error::invalid())?; + Some(locale) + } + }; + + let quality = if let Some(quality) = it.next() { + let quality = trim_bytes(quality); + let quality = quality.strip_prefix(b"q=").ok_or(Error::invalid())?; + let quality = std::str::from_utf8(quality).map_err(|_e| Error::invalid())?; + let quality = quality.parse::().map_err(|_e| Error::invalid())?; + // Bound the quality between 0 and 1 + let quality = quality.clamp(0_f64, 1_f64); + + // Make sure the iterator is empty + if it.next().is_some() { + return Err(Error::invalid()); + } + + #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)] + { + f64::round(quality * 1000_f64) as u16 + } + } else { + 1000 + }; + + parts.push(AcceptLanguagePart { locale, quality }); + } + } + + parts.sort(); + + Ok(AcceptLanguage { parts }) + } + + fn encode>(&self, values: &mut E) { + let mut value = String::new(); + let mut first = true; + for part in &self.parts { + if first { + first = false; + } else { + value.push_str(", "); + } + + if let Some(locale) = &part.locale { + value.push_str(&locale.to_string()); + } else { + value.push('*'); + } + + if part.quality != 1000 { + value.push_str(";q="); + value.push_str(&(f64::from(part.quality) / 1000_f64).to_string()); + } 
+ } + + // We know this is safe because we only use ASCII characters + values.extend(Some(HeaderValue::from_str(&value).unwrap())); + } +} + +#[cfg(test)] +mod tests { + use headers::HeaderMapExt; + use http::{HeaderMap, HeaderValue, header::ACCEPT_LANGUAGE}; + use icu_locid::locale; + + use super::*; + + #[test] + fn test_decode() { + let headers = HeaderMap::from_iter([( + ACCEPT_LANGUAGE, + HeaderValue::from_str("fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5").unwrap(), + )]); + + let accept_language: Option = headers.typed_get(); + assert!(accept_language.is_some()); + let accept_language = accept_language.unwrap(); + + assert_eq!( + accept_language, + AcceptLanguage { + parts: vec![ + AcceptLanguagePart { + locale: Some(locale!("fr-CH")), + quality: 1000, + }, + AcceptLanguagePart { + locale: Some(locale!("fr")), + quality: 900, + }, + AcceptLanguagePart { + locale: Some(locale!("en")), + quality: 800, + }, + AcceptLanguagePart { + locale: Some(locale!("de")), + quality: 700, + }, + AcceptLanguagePart { + locale: None, + quality: 500, + }, + ] + } + ); + } + + #[test] + /// Test that we can decode a header with multiple values unordered, and + /// that the output is ordered by quality + fn test_decode_order() { + let headers = HeaderMap::from_iter([( + ACCEPT_LANGUAGE, + HeaderValue::from_str("*;q=0.5, fr-CH, en;q=0.8, fr;q=0.9, de;q=0.9").unwrap(), + )]); + + let accept_language: Option = headers.typed_get(); + assert!(accept_language.is_some()); + let accept_language = accept_language.unwrap(); + + assert_eq!( + accept_language, + AcceptLanguage { + parts: vec![ + AcceptLanguagePart { + locale: Some(locale!("fr-CH")), + quality: 1000, + }, + AcceptLanguagePart { + locale: Some(locale!("fr")), + quality: 900, + }, + AcceptLanguagePart { + locale: Some(locale!("de")), + quality: 900, + }, + AcceptLanguagePart { + locale: Some(locale!("en")), + quality: 800, + }, + AcceptLanguagePart { + locale: None, + quality: 500, + }, + ] + } + ); + } + + #[test] + fn 
test_encode() { + let accept_language = AcceptLanguage { + parts: vec![ + AcceptLanguagePart { + locale: Some(locale!("fr-CH")), + quality: 1000, + }, + AcceptLanguagePart { + locale: Some(locale!("fr")), + quality: 900, + }, + AcceptLanguagePart { + locale: Some(locale!("de")), + quality: 900, + }, + AcceptLanguagePart { + locale: Some(locale!("en")), + quality: 800, + }, + AcceptLanguagePart { + locale: None, + quality: 500, + }, + ], + }; + + let mut headers = HeaderMap::new(); + headers.typed_insert(accept_language); + let header = headers.get(ACCEPT_LANGUAGE).unwrap(); + assert_eq!( + header.to_str().unwrap(), + "fr-CH, fr;q=0.9, de;q=0.9, en;q=0.8, *;q=0.5" + ); + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/lib.rs b/matrix-authentication-service/crates/axum-utils/src/lib.rs new file mode 100644 index 00000000..a4e769dc --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/lib.rs @@ -0,0 +1,27 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(clippy::future_not_send)] +#![allow(clippy::module_name_repetitions)] + +pub mod client_authorization; +pub mod cookies; +pub mod csrf; +pub mod error_wrapper; +pub mod fancy_error; +pub mod jwt; +pub mod language_detection; +pub mod sentry; +pub mod session; +pub mod user_authorization; + +pub use axum; + +pub use self::{ + error_wrapper::ErrorWrapper, + fancy_error::{GenericError, InternalError}, + session::{SessionInfo, SessionInfoExt}, +}; diff --git a/matrix-authentication-service/crates/axum-utils/src/sentry.rs b/matrix-authentication-service/crates/axum-utils/src/sentry.rs new file mode 100644 index 00000000..9cf26301 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/sentry.rs @@ -0,0 +1,65 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::convert::Infallible; + +use axum::response::{IntoResponseParts, ResponseParts}; +use sentry::types::Uuid; + +/// A wrapper to include a Sentry event ID in the response headers. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct SentryEventID(Uuid); + +impl SentryEventID { + /// Create a new Sentry event ID header for the last event on the hub. + pub fn for_last_event() -> Option { + sentry::last_event_id().map(Self) + } +} + +impl From for SentryEventID { + fn from(uuid: Uuid) -> Self { + Self(uuid) + } +} + +impl IntoResponseParts for SentryEventID { + type Error = Infallible; + fn into_response_parts(self, mut res: ResponseParts) -> Result { + res.headers_mut() + .insert("X-Sentry-Event-ID", self.0.to_string().parse().unwrap()); + + Ok(res) + } +} + +/// Record an error. It will emit a tracing event with the error level if +/// matches the pattern, warning otherwise. It also returns the Sentry event ID +/// if the error was recorded. +#[macro_export] +macro_rules! record_error { + ($error:expr, !) => {{ + tracing::warn!(message = &$error as &dyn std::error::Error); + Option::<$crate::sentry::SentryEventID>::None + }}; + + ($error:expr) => {{ + tracing::error!(message = &$error as &dyn std::error::Error); + + // With the `sentry-tracing` integration, Sentry should have + // captured an error, so let's extract the last event ID from the + // current hub + $crate::sentry::SentryEventID::for_last_event() + }}; + + ($error:expr, $pattern:pat) => { + if let $pattern = $error { + record_error!($error) + } else { + record_error!($error, !) 
+ } + }; +} diff --git a/matrix-authentication-service/crates/axum-utils/src/session.rs b/matrix-authentication-service/crates/axum-utils/src/session.rs new file mode 100644 index 00000000..b5ed670a --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/session.rs @@ -0,0 +1,101 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_data_model::BrowserSession; +use mas_storage::RepositoryAccess; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::cookies::CookieJar; + +/// An encrypted cookie to save the session ID +#[derive(Serialize, Deserialize, Debug, Default, Clone)] +pub struct SessionInfo { + current: Option, +} + +impl SessionInfo { + /// Forge the cookie from a [`BrowserSession`] + #[must_use] + pub fn from_session(session: &BrowserSession) -> Self { + Self { + current: Some(session.id), + } + } + + /// Mark the session as ended + #[must_use] + pub fn mark_session_ended(mut self) -> Self { + self.current = None; + self + } + + /// Load the active [`BrowserSession`] from database + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails to load the session. + pub async fn load_active_session( + &self, + repo: &mut impl RepositoryAccess, + ) -> Result, E> { + let Some(session_id) = self.current else { + return Ok(None); + }; + + let maybe_session = repo + .browser_session() + .lookup(session_id) + .await? 
+ // Ensure that the session is still active + .filter(BrowserSession::active); + + Ok(maybe_session) + } + + /// Get the current session ID, if any + #[must_use] + pub fn current_session_id(&self) -> Option { + self.current + } +} + +pub trait SessionInfoExt { + #[must_use] + fn session_info(self) -> (SessionInfo, Self); + + #[must_use] + fn update_session_info(self, info: &SessionInfo) -> Self; + + #[must_use] + fn set_session(self, session: &BrowserSession) -> Self + where + Self: Sized, + { + let session_info = SessionInfo::from_session(session); + self.update_session_info(&session_info) + } +} + +impl SessionInfoExt for CookieJar { + fn session_info(self) -> (SessionInfo, Self) { + let info = match self.load("session") { + Ok(Some(s)) => s, + Ok(None) => SessionInfo::default(), + Err(e) => { + tracing::error!("failed to load session cookie: {}", e); + SessionInfo::default() + } + }; + + let jar = self.update_session_info(&info); + (info, jar) + } + + fn update_session_info(self, info: &SessionInfo) -> Self { + self.save("session", info, true) + } +} diff --git a/matrix-authentication-service/crates/axum-utils/src/user_authorization.rs b/matrix-authentication-service/crates/axum-utils/src/user_authorization.rs new file mode 100644 index 00000000..395ce465 --- /dev/null +++ b/matrix-authentication-service/crates/axum-utils/src/user_authorization.rs @@ -0,0 +1,338 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::HashMap, error::Error}; + +use axum::{ + extract::{ + Form, FromRequest, FromRequestParts, + rejection::{FailedToDeserializeForm, FormRejection}, + }, + response::{IntoResponse, Response}, +}; +use axum_extra::typed_header::{TypedHeader, TypedHeaderRejectionReason}; +use headers::{Authorization, Header, HeaderMapExt, HeaderName, authorization::Bearer}; +use http::{HeaderMap, HeaderValue, Request, StatusCode, header::WWW_AUTHENTICATE}; +use mas_data_model::{Clock, Session}; +use mas_storage::{ + RepositoryAccess, + oauth2::{OAuth2AccessTokenRepository, OAuth2SessionRepository}, +}; +use serde::{Deserialize, de::DeserializeOwned}; +use thiserror::Error; + +#[derive(Debug, Deserialize)] +struct AuthorizedForm { + #[serde(default)] + access_token: Option, + + #[serde(flatten)] + inner: F, +} + +#[derive(Debug)] +enum AccessToken { + Form(String), + Header(String), + None, +} + +impl AccessToken { + async fn fetch( + &self, + repo: &mut impl RepositoryAccess, + ) -> Result<(mas_data_model::AccessToken, Session), AuthorizationVerificationError> { + let token = match self { + AccessToken::Form(t) | AccessToken::Header(t) => t, + AccessToken::None => return Err(AuthorizationVerificationError::MissingToken), + }; + + let token = repo + .oauth2_access_token() + .find_by_token(token.as_str()) + .await? + .ok_or(AuthorizationVerificationError::InvalidToken)?; + + let session = repo + .oauth2_session() + .lookup(token.session_id) + .await? 
+ .ok_or(AuthorizationVerificationError::InvalidToken)?; + + Ok((token, session)) + } +} + +#[derive(Debug)] +pub struct UserAuthorization { + access_token: AccessToken, + form: Option, +} + +impl UserAuthorization { + // TODO: take scopes to validate as parameter + /// Verify a user authorization and return the session and the protected + /// form value + /// + /// # Errors + /// + /// Returns an error if the token is invalid, if the user session ended or + /// if the form is missing + pub async fn protected_form( + self, + repo: &mut impl RepositoryAccess, + clock: &impl Clock, + ) -> Result<(Session, F), AuthorizationVerificationError> { + let Some(form) = self.form else { + return Err(AuthorizationVerificationError::MissingForm); + }; + + let (token, session) = self.access_token.fetch(repo).await?; + + if !token.is_valid(clock.now()) || !session.is_valid() { + return Err(AuthorizationVerificationError::InvalidToken); + } + + Ok((session, form)) + } + + // TODO: take scopes to validate as parameter + /// Verify a user authorization and return the session + /// + /// # Errors + /// + /// Returns an error if the token is invalid or if the user session ended + pub async fn protected( + self, + repo: &mut impl RepositoryAccess, + clock: &impl Clock, + ) -> Result> { + let (token, session) = self.access_token.fetch(repo).await?; + + if !token.is_valid(clock.now()) || !session.is_valid() { + return Err(AuthorizationVerificationError::InvalidToken); + } + + if !token.is_used() { + // Mark the token as used + repo.oauth2_access_token().mark_used(clock, token).await?; + } + + Ok(session) + } +} + +pub enum UserAuthorizationError { + InvalidHeader, + TokenInFormAndHeader, + BadForm(FailedToDeserializeForm), + Internal(Box), +} + +#[derive(Debug, Error)] +pub enum AuthorizationVerificationError { + #[error("missing token")] + MissingToken, + + #[error("invalid token")] + InvalidToken, + + #[error("missing form")] + MissingForm, + + #[error(transparent)] + Internal(#[from] 
E), +} + +enum BearerError { + InvalidRequest, + InvalidToken, + #[allow(dead_code)] + InsufficientScope { + scope: Option, + }, +} + +impl BearerError { + fn error(&self) -> HeaderValue { + match self { + BearerError::InvalidRequest => HeaderValue::from_static("invalid_request"), + BearerError::InvalidToken => HeaderValue::from_static("invalid_token"), + BearerError::InsufficientScope { .. } => HeaderValue::from_static("insufficient_scope"), + } + } + + fn params(&self) -> HashMap<&'static str, HeaderValue> { + match self { + BearerError::InsufficientScope { scope: Some(scope) } => { + let mut m = HashMap::new(); + m.insert("scope", scope.clone()); + m + } + _ => HashMap::new(), + } + } +} + +enum WwwAuthenticate { + #[allow(dead_code)] + Basic { realm: HeaderValue }, + Bearer { + realm: Option, + error: BearerError, + error_description: Option, + }, +} + +impl Header for WwwAuthenticate { + fn name() -> &'static HeaderName { + &WWW_AUTHENTICATE + } + + fn decode<'i, I>(_values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + Err(headers::Error::invalid()) + } + + fn encode>(&self, values: &mut E) { + let (scheme, params) = match self { + WwwAuthenticate::Basic { realm } => { + let mut params = HashMap::new(); + params.insert("realm", realm.clone()); + ("Basic", params) + } + WwwAuthenticate::Bearer { + realm, + error, + error_description, + } => { + let mut params = error.params(); + params.insert("error", error.error()); + + if let Some(realm) = realm { + params.insert("realm", realm.clone()); + } + + if let Some(error_description) = error_description { + params.insert("error_description", error_description.clone()); + } + + ("Bearer", params) + } + }; + + let params = params.into_iter().map(|(k, v)| format!(" {k}={v:?}")); + let value: String = std::iter::once(scheme.to_owned()).chain(params).collect(); + let value = HeaderValue::from_str(&value).unwrap(); + values.extend(std::iter::once(value)); + } +} + +impl IntoResponse for 
UserAuthorizationError { + fn into_response(self) -> Response { + match self { + Self::BadForm(_) | Self::InvalidHeader | Self::TokenInFormAndHeader => { + let mut headers = HeaderMap::new(); + + headers.typed_insert(WwwAuthenticate::Bearer { + realm: None, + error: BearerError::InvalidRequest, + error_description: None, + }); + (StatusCode::BAD_REQUEST, headers).into_response() + } + Self::Internal(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), + } + } +} + +impl IntoResponse for AuthorizationVerificationError +where + E: ToString, +{ + fn into_response(self) -> Response { + match self { + Self::MissingForm | Self::MissingToken => { + let mut headers = HeaderMap::new(); + + headers.typed_insert(WwwAuthenticate::Bearer { + realm: None, + error: BearerError::InvalidRequest, + error_description: None, + }); + (StatusCode::BAD_REQUEST, headers).into_response() + } + Self::InvalidToken => { + let mut headers = HeaderMap::new(); + + headers.typed_insert(WwwAuthenticate::Bearer { + realm: None, + error: BearerError::InvalidToken, + error_description: None, + }); + (StatusCode::BAD_REQUEST, headers).into_response() + } + Self::Internal(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), + } + } +} + +impl FromRequest for UserAuthorization +where + F: DeserializeOwned, + S: Send + Sync, +{ + type Rejection = UserAuthorizationError; + + async fn from_request( + req: Request, + state: &S, + ) -> Result { + let (mut parts, body) = req.into_parts(); + let header = + TypedHeader::>::from_request_parts(&mut parts, state).await; + + // Take the Authorization header + let token_from_header = match header { + Ok(header) => Some(header.token().to_owned()), + Err(err) => match err.reason() { + // If it's missing it is fine + TypedHeaderRejectionReason::Missing => None, + // If the header could not be parsed, return the error + _ => return Err(UserAuthorizationError::InvalidHeader), + }, + }; + + let req = Request::from_parts(parts, 
body); + + // Take the form value + let (token_from_form, form) = + match Form::>::from_request(req, state).await { + Ok(Form(form)) => (form.access_token, Some(form.inner)), + // If it is not a form, continue + Err(FormRejection::InvalidFormContentType(_err)) => (None, None), + // If the form could not be read, return a Bad Request error + Err(FormRejection::FailedToDeserializeForm(err)) => { + return Err(UserAuthorizationError::BadForm(err)); + } + // Other errors (body read twice, byte stream broke) return an internal error + Err(e) => return Err(UserAuthorizationError::Internal(Box::new(e))), + }; + + let access_token = match (token_from_header, token_from_form) { + // Ensure the token should not be in both the form and the access token + (Some(_), Some(_)) => return Err(UserAuthorizationError::TokenInFormAndHeader), + (Some(t), None) => AccessToken::Header(t), + (None, Some(t)) => AccessToken::Form(t), + (None, None) => AccessToken::None, + }; + + Ok(UserAuthorization { access_token, form }) + } +} diff --git a/matrix-authentication-service/crates/cli/Cargo.toml b/matrix-authentication-service/crates/cli/Cargo.toml new file mode 100644 index 00000000..5341a15f --- /dev/null +++ b/matrix-authentication-service/crates/cli/Cargo.toml @@ -0,0 +1,103 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-cli" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +build = "build.rs" + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +axum.workspace = true +bytes.workspace = true +camino.workspace = true +chrono.workspace = true +clap.workspace = true +console.workspace = true +dialoguer.workspace = true +dotenvy.workspace = true +figment.workspace = true +futures-util.workspace = true +headers.workspace = true +http-body-util.workspace = true +hyper.workspace = true +ipnetwork.workspace = true +itertools.workspace = true +listenfd.workspace = true +rand.workspace = true +rand_chacha.workspace = true +reqwest.workspace = true +rustls.workspace = true +sd-notify.workspace = true +serde_json.workspace = true +serde_yaml.workspace = true +sqlx.workspace = true +tokio.workspace = true +tokio-util.workspace = true +tower.workspace = true +tower-http.workspace = true +url.workspace = true +zeroize.workspace = true + +tracing.workspace = true +tracing-appender.workspace = true +tracing-subscriber.workspace = true +tracing-opentelemetry.workspace = true +opentelemetry.workspace = true +opentelemetry-http.workspace = true +opentelemetry-instrumentation-process.workspace = true +opentelemetry-instrumentation-tokio.workspace = true +opentelemetry-jaeger-propagator.workspace = true +opentelemetry-otlp.workspace = true +opentelemetry-prometheus-text-exporter.workspace = true +opentelemetry-resource-detectors.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry-stdout.workspace = true +opentelemetry_sdk.workspace = true +sentry.workspace = true +sentry-tracing.workspace = true +sentry-tower.workspace = true + +mas-config.workspace = true +mas-context.workspace = true +mas-data-model.workspace = true +mas-email.workspace = true +mas-handlers.workspace = true 
+mas-http.workspace = true +mas-i18n.workspace = true +mas-keystore.workspace = true +mas-listener.workspace = true +mas-matrix.workspace = true +mas-matrix-synapse.workspace = true +mas-policy.workspace = true +mas-router.workspace = true +mas-storage.workspace = true +mas-storage-pg.workspace = true +mas-tasks.workspace = true +mas-templates.workspace = true +mas-tower.workspace = true + +syn2mas.workspace = true + +[build-dependencies] +anyhow.workspace = true +vergen-gitcl.workspace = true + +[features] +# Features used for the prebuilt binaries +dist = ["mas-config/dist"] + +# Features used in the Docker image +docker = ["mas-config/docker"] diff --git a/matrix-authentication-service/crates/cli/build.rs b/matrix-authentication-service/crates/cli/build.rs new file mode 100644 index 00000000..fd111273 --- /dev/null +++ b/matrix-authentication-service/crates/cli/build.rs @@ -0,0 +1,36 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use vergen_gitcl::{Emitter, GitclBuilder, RustcBuilder}; + +fn main() -> anyhow::Result<()> { + // Instruct rustc that we'll be using #[cfg(tokio_unstable)] + println!("cargo::rustc-check-cfg=cfg(tokio_unstable)"); + + // At build time, we override the version through the environment variable + // VERGEN_GIT_DESCRIBE. In some contexts, it means this variable is set but + // empty, so we unset it here. + if let Ok(ver) = std::env::var("VERGEN_GIT_DESCRIBE") + && ver.is_empty() + { + #[allow(unsafe_code)] + // SAFETY: This is safe because the build script is running a single thread + unsafe { + std::env::remove_var("VERGEN_GIT_DESCRIBE"); + } + } + + let gitcl = GitclBuilder::default() + .describe(true, false, Some("v*.*.*")) + .build()?; + let rustc = RustcBuilder::default().semver(true).build()?; + + Emitter::default() + .add_instructions(&gitcl)? + .add_instructions(&rustc)? 
+ .emit()?; + + Ok(()) +} diff --git a/matrix-authentication-service/crates/cli/src/app_state.rs b/matrix-authentication-service/crates/cli/src/app_state.rs new file mode 100644 index 00000000..f211fc29 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/app_state.rs @@ -0,0 +1,374 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{convert::Infallible, net::IpAddr, sync::Arc}; + +use axum::extract::{FromRef, FromRequestParts}; +use ipnetwork::IpNetwork; +use mas_context::LogContext; +use mas_data_model::{AppVersion, BoxClock, BoxRng, SiteConfig, SystemClock}; +use mas_handlers::{ + ActivityTracker, BoundActivityTracker, CookieManager, ErrorWrapper, GraphQLSchema, Limiter, + MetadataCache, RequesterFingerprint, passwords::PasswordManager, +}; +use mas_i18n::Translator; +use mas_keystore::{Encrypter, Keystore}; +use mas_matrix::HomeserverConnection; +use mas_policy::{Policy, PolicyFactory}; +use mas_router::UrlBuilder; +use mas_storage::{BoxRepository, BoxRepositoryFactory, RepositoryFactory}; +use mas_storage_pg::PgRepositoryFactory; +use mas_templates::Templates; +use opentelemetry::KeyValue; +use rand::SeedableRng; +use sqlx::PgPool; +use tracing::Instrument; + +use crate::{VERSION, telemetry::METER}; + +#[derive(Clone)] +pub struct AppState { + pub repository_factory: PgRepositoryFactory, + pub templates: Templates, + pub key_store: Keystore, + pub cookie_manager: CookieManager, + pub encrypter: Encrypter, + pub url_builder: UrlBuilder, + pub homeserver_connection: Arc, + pub policy_factory: Arc, + pub graphql_schema: GraphQLSchema, + pub http_client: reqwest::Client, + pub password_manager: PasswordManager, + pub metadata_cache: MetadataCache, + pub site_config: SiteConfig, + pub activity_tracker: ActivityTracker, + pub trusted_proxies: 
Vec, + pub limiter: Limiter, +} + +impl AppState { + /// Init the metrics for the app state. + pub fn init_metrics(&mut self) { + let pool = self.repository_factory.pool(); + METER + .i64_observable_up_down_counter("db.connections.usage") + .with_description("The number of connections that are currently in `state` described by the state attribute.") + .with_unit("{connection}") + .with_callback(move |instrument| { + let idle = u32::try_from(pool.num_idle()).unwrap_or(u32::MAX); + let used = pool.size() - idle; + instrument.observe(i64::from(idle), &[KeyValue::new("state", "idle")]); + instrument.observe(i64::from(used), &[KeyValue::new("state", "used")]); + }) + .build(); + + let pool = self.repository_factory.pool(); + METER + .i64_observable_up_down_counter("db.connections.max") + .with_description("The maximum number of open connections allowed.") + .with_unit("{connection}") + .with_callback(move |instrument| { + let max_conn = pool.options().get_max_connections(); + instrument.observe(i64::from(max_conn), &[]); + }) + .build(); + } + + /// Init the metadata cache in the background + pub fn init_metadata_cache(&self) { + let factory = self.repository_factory.clone(); + let metadata_cache = self.metadata_cache.clone(); + let http_client = self.http_client.clone(); + + tokio::spawn( + LogContext::new("metadata-cache-warmup") + .run(async move || { + let mut repo = match factory.create().await { + Ok(conn) => conn, + Err(e) => { + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to acquire a database connection" + ); + return; + } + }; + + if let Err(e) = metadata_cache + .warm_up_and_run( + &http_client, + std::time::Duration::from_secs(60 * 15), + &mut repo, + ) + .await + { + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to warm up the metadata cache" + ); + } + }) + .instrument(tracing::info_span!("metadata_cache.background_warmup")), + ); + } +} + +// XXX(quenting): we only use this for the healthcheck endpoint, checking 
the db +// should be part of the repository +impl FromRef for PgPool { + fn from_ref(input: &AppState) -> Self { + input.repository_factory.pool() + } +} + +impl FromRef for BoxRepositoryFactory { + fn from_ref(input: &AppState) -> Self { + input.repository_factory.clone().boxed() + } +} + +impl FromRef for GraphQLSchema { + fn from_ref(input: &AppState) -> Self { + input.graphql_schema.clone() + } +} + +impl FromRef for Templates { + fn from_ref(input: &AppState) -> Self { + input.templates.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &AppState) -> Self { + input.templates.translator() + } +} + +impl FromRef for Keystore { + fn from_ref(input: &AppState) -> Self { + input.key_store.clone() + } +} + +impl FromRef for Encrypter { + fn from_ref(input: &AppState) -> Self { + input.encrypter.clone() + } +} + +impl FromRef for UrlBuilder { + fn from_ref(input: &AppState) -> Self { + input.url_builder.clone() + } +} + +impl FromRef for reqwest::Client { + fn from_ref(input: &AppState) -> Self { + input.http_client.clone() + } +} + +impl FromRef for PasswordManager { + fn from_ref(input: &AppState) -> Self { + input.password_manager.clone() + } +} + +impl FromRef for CookieManager { + fn from_ref(input: &AppState) -> Self { + input.cookie_manager.clone() + } +} + +impl FromRef for MetadataCache { + fn from_ref(input: &AppState) -> Self { + input.metadata_cache.clone() + } +} + +impl FromRef for SiteConfig { + fn from_ref(input: &AppState) -> Self { + input.site_config.clone() + } +} + +impl FromRef for Limiter { + fn from_ref(input: &AppState) -> Self { + input.limiter.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &AppState) -> Self { + input.policy_factory.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &AppState) -> Self { + Arc::clone(&input.homeserver_connection) + } +} + +impl FromRef for AppVersion { + fn from_ref(_input: &AppState) -> Self { + AppVersion(VERSION) + } +} + +impl FromRequestParts for BoxClock { + type 
Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + _state: &AppState, + ) -> Result { + let clock = SystemClock::default(); + Ok(Box::new(clock)) + } +} + +impl FromRequestParts for BoxRng { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + _state: &AppState, + ) -> Result { + // This rng is used to source the local rng + #[allow(clippy::disallowed_methods)] + let rng = rand::thread_rng(); + + let rng = rand_chacha::ChaChaRng::from_rng(rng).expect("Failed to seed RNG"); + Ok(Box::new(rng)) + } +} + +impl FromRequestParts for Policy { + type Rejection = ErrorWrapper; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &AppState, + ) -> Result { + let policy = state.policy_factory.instantiate().await?; + Ok(policy) + } +} + +impl FromRequestParts for ActivityTracker { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &AppState, + ) -> Result { + Ok(state.activity_tracker.clone()) + } +} + +fn infer_client_ip( + parts: &axum::http::request::Parts, + trusted_proxies: &[IpNetwork], +) -> Option { + let connection_info = parts.extensions.get::(); + + let peer = if let Some(info) = connection_info { + // We can always trust the proxy protocol to give us the correct IP address + if let Some(proxy) = info.get_proxy_ref() + && let Some(source) = proxy.source() + { + return Some(source.ip()); + } + + info.get_peer_addr().map(|addr| addr.ip()) + } else { + None + }; + + // Get the list of IPs from the X-Forwarded-For header + let peers_from_header = parts + .headers + .get("x-forwarded-for") + .and_then(|value| value.to_str().ok()) + .map(|value| value.split(',').filter_map(|v| v.parse().ok())) + .into_iter() + .flatten(); + + // This constructs a list of IP addresses that might be the client's IP address. 
+ // Each intermediate proxy is supposed to add the client's IP address to front + // of the list. We are effectively adding the IP we got from the socket to the + // front of the list. + // We also call `to_canonical` so that IPv6-mapped IPv4 addresses + // (::ffff:A.B.C.D) are converted to IPv4. + let peer_list: Vec = peer + .into_iter() + .chain(peers_from_header) + .map(|ip| ip.to_canonical()) + .collect(); + + // We'll fallback to the first IP in the list if all the IPs we got are trusted + let fallback = peer_list.first().copied(); + + // Now we go through the list, and the IP of the client is the first IP that is + // not in the list of trusted proxies, starting from the back. + let client_ip = peer_list + .iter() + .rfind(|ip| !trusted_proxies.iter().any(|network| network.contains(**ip))) + .copied(); + + client_ip.or(fallback) +} + +impl FromRequestParts for BoundActivityTracker { + type Rejection = Infallible; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + state: &AppState, + ) -> Result { + // TODO: we may infer the IP twice, for the activity tracker and the limiter + let ip = infer_client_ip(parts, &state.trusted_proxies); + tracing::debug!(ip = ?ip, "Inferred client IP address"); + Ok(state.activity_tracker.clone().bind(ip)) + } +} + +impl FromRequestParts for RequesterFingerprint { + type Rejection = Infallible; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + state: &AppState, + ) -> Result { + // TODO: we may infer the IP twice, for the activity tracker and the limiter + let ip = infer_client_ip(parts, &state.trusted_proxies); + + if let Some(ip) = ip { + Ok(RequesterFingerprint::new(ip)) + } else { + // If we can't infer the IP address, we'll just use an empty fingerprint and + // warn about it + tracing::warn!( + "Could not infer client IP address for an operation which rate-limits based on IP addresses" + ); + Ok(RequesterFingerprint::EMPTY) + } + } +} + +impl FromRequestParts for 
BoxRepository { + type Rejection = ErrorWrapper; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &AppState, + ) -> Result { + let repo = state.repository_factory.create().await?; + Ok(repo) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/config.rs b/matrix-authentication-service/crates/cli/src/commands/config.rs new file mode 100644 index 00000000..ae4d329f --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/config.rs @@ -0,0 +1,151 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::process::ExitCode; + +use anyhow::Context; +use camino::Utf8PathBuf; +use clap::Parser; +use figment::Figment; +use mas_config::{ConfigurationSection, RootConfig, SyncConfig}; +use mas_data_model::{Clock as _, SystemClock}; +use rand::SeedableRng; +use tokio::io::AsyncWriteExt; +use tracing::{info, info_span}; + +use crate::util::database_connection_from_config; + +#[derive(Parser, Debug)] +pub(super) struct Options { + #[command(subcommand)] + subcommand: Subcommand, +} + +#[derive(Parser, Debug)] +enum Subcommand { + /// Dump the current config as YAML + Dump { + /// The path to the config file to dump + /// + /// If not specified, the config will be written to stdout + #[clap(short, long)] + output: Option, + }, + + /// Check a config file + Check, + + /// Generate a new config file + Generate { + /// The path to the config file to generate + /// + /// If not specified, the config will be written to stdout + #[clap(short, long)] + output: Option, + + /// Existing Synapse configuration used to generate the MAS config + #[arg(short, long, action = clap::ArgAction::Append)] + synapse_config: Vec, + }, + + /// Sync the clients and providers from the config file to the database + Sync { + /// 
Prune elements that are in the database but not in the config file + /// anymore + #[clap(long)] + prune: bool, + + /// Do not actually write to the database + #[clap(long)] + dry_run: bool, + }, +} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + use Subcommand as SC; + match self.subcommand { + SC::Dump { output } => { + let _span = info_span!("cli.config.dump").entered(); + + let config = RootConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + let config = serde_yaml::to_string(&config)?; + + if let Some(output) = output { + info!("Writing configuration to {output:?}"); + let mut file = tokio::fs::File::create(output).await?; + file.write_all(config.as_bytes()).await?; + } else { + info!("Writing configuration to standard output"); + tokio::io::stdout().write_all(config.as_bytes()).await?; + } + } + + SC::Check => { + let _span = info_span!("cli.config.check").entered(); + + let _config = RootConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + info!("Configuration file looks good"); + } + + SC::Generate { + output, + synapse_config, + } => { + let _span = info_span!("cli.config.generate").entered(); + let clock = SystemClock::default(); + + // XXX: we should disallow SeedableRng::from_entropy + let mut rng = rand_chacha::ChaChaRng::from_entropy(); + let mut config = RootConfig::generate(&mut rng).await?; + + if !synapse_config.is_empty() { + info!("Adjusting MAS config to match Synapse config from {synapse_config:?}"); + let synapse_config = syn2mas::synapse_config::Config::load(&synapse_config) + .map_err(anyhow::Error::from_boxed)?; + config = synapse_config.adjust_mas_config(config, &mut rng, clock.now()); + } + + let config = serde_yaml::to_string(&config)?; + if let Some(output) = output { + info!("Writing configuration to {output:?}"); + let mut file = tokio::fs::File::create(output).await?; + file.write_all(config.as_bytes()).await?; + } else { + info!("Writing configuration to standard output"); 
+ tokio::io::stdout().write_all(config.as_bytes()).await?; + } + } + + SC::Sync { prune, dry_run } => { + let config = SyncConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + let clock = SystemClock::default(); + let encrypter = config.secrets.encrypter().await?; + + // Grab a connection to the database + let mut conn = database_connection_from_config(&config.database).await?; + + mas_storage_pg::migrate(&mut conn) + .await + .context("could not run migrations")?; + + crate::sync::config_sync( + config.upstream_oauth2, + config.clients, + &mut conn, + &encrypter, + &clock, + prune, + dry_run, + ) + .await + .context("could not sync the configuration with the database")?; + } + } + + Ok(ExitCode::SUCCESS) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/database.rs b/matrix-authentication-service/crates/cli/src/commands/database.rs new file mode 100644 index 00000000..7acc6830 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/database.rs @@ -0,0 +1,43 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::process::ExitCode; + +use anyhow::Context; +use clap::Parser; +use figment::Figment; +use mas_config::{ConfigurationSectionExt, DatabaseConfig}; +use tracing::info_span; + +use crate::util::database_connection_from_config; + +#[derive(Parser, Debug)] +pub(super) struct Options { + #[command(subcommand)] + subcommand: Subcommand, +} + +#[derive(Parser, Debug)] +enum Subcommand { + /// Run database migrations + Migrate, +} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + let _span = info_span!("cli.database.migrate").entered(); + let config = + DatabaseConfig::extract_or_default(figment).map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&config).await?; + + // Run pending migrations + mas_storage_pg::migrate(&mut conn) + .await + .context("could not run migrations")?; + + Ok(ExitCode::SUCCESS) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/debug.rs b/matrix-authentication-service/crates/cli/src/commands/debug.rs new file mode 100644 index 00000000..6da64f95 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/debug.rs @@ -0,0 +1,70 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::process::ExitCode; + +use clap::Parser; +use figment::Figment; +use mas_config::{ + ConfigurationSection, ConfigurationSectionExt, DatabaseConfig, ExperimentalConfig, + MatrixConfig, PolicyConfig, +}; +use mas_storage_pg::PgRepositoryFactory; +use tracing::{info, info_span}; + +use crate::util::{ + database_pool_from_config, load_policy_factory_dynamic_data, policy_factory_from_config, +}; + +#[derive(Parser, Debug)] +pub(super) struct Options { + #[command(subcommand)] + subcommand: Subcommand, +} + +#[derive(Parser, Debug)] +enum Subcommand { + /// Check that the policies compile + Policy { + /// With dynamic data loaded + #[arg(long)] + with_dynamic_data: bool, + }, +} + +impl Options { + #[tracing::instrument(skip_all)] + pub async fn run(self, figment: &Figment) -> anyhow::Result { + use Subcommand as SC; + match self.subcommand { + SC::Policy { with_dynamic_data } => { + let _span = info_span!("cli.debug.policy").entered(); + let config = + PolicyConfig::extract_or_default(figment).map_err(anyhow::Error::from_boxed)?; + let matrix_config = + MatrixConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + let experimental_config = + ExperimentalConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + info!("Loading and compiling the policy module"); + let policy_factory = + policy_factory_from_config(&config, &matrix_config, &experimental_config) + .await?; + + if with_dynamic_data { + let database_config = + DatabaseConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + let pool = database_pool_from_config(&database_config).await?; + let repository_factory = PgRepositoryFactory::new(pool.clone()); + load_policy_factory_dynamic_data(&policy_factory, &repository_factory).await?; + } + + let _instance = policy_factory.instantiate().await?; + } + } + + Ok(ExitCode::SUCCESS) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/doctor.rs b/matrix-authentication-service/crates/cli/src/commands/doctor.rs new file 
mode 100644 index 00000000..132a7db8 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/doctor.rs @@ -0,0 +1,410 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Diagnostic utility to check the health of the deployment +//! +//! The code is quite repetitive for now, but we can refactor later with a +//! better check abstraction + +use std::process::ExitCode; + +use anyhow::Context; +use clap::Parser; +use figment::Figment; +use hyper::StatusCode; +use mas_config::{ConfigurationSection, RootConfig}; +use mas_http::RequestBuilderExt; +use tracing::{error, info, info_span, warn}; +use url::{Host, Url}; + +/// Base URL for the human-readable documentation +const DOCS_BASE: &str = "https://element-hq.github.io/matrix-authentication-service"; + +#[derive(Parser, Debug)] +pub(super) struct Options {} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + let _span = info_span!("cli.doctor").entered(); + info!( + "💡 Running diagnostics, make sure that both MAS and Synapse are running, and that MAS is using the same configuration files as this tool." + ); + + let config = RootConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + + // We'll need an HTTP client + let http_client = mas_http::reqwest_client(); + let base_url = config.http.public_base.as_str(); + let issuer = config.http.issuer.as_ref().map(url::Url::as_str); + let issuer = issuer.unwrap_or(base_url); + let matrix_domain: Host = Host::parse(&config.matrix.homeserver).context( + r"The homeserver host in the config (`matrix.homeserver`) is not a valid domain. 
+See {DOCS_BASE}/setup/homeserver.html", + )?; + let secret = config.matrix.secret().await?; + let hs_api = config.matrix.endpoint; + + if !issuer.starts_with("https://") { + warn!( + r"⚠️ The issuer in the config (`http.issuer`/`http.public_base`) is not an HTTPS URL. +This means some clients will refuse to use it." + ); + } + + let well_known_uri = format!("https://{matrix_domain}/.well-known/matrix/client"); + let result = http_client.get(&well_known_uri).send_traced().await; + + let expected_well_known = serde_json::json!({ + "m.homeserver": { + "base_url": "...", + }, + "org.matrix.msc2965.authentication": { + "issuer": issuer, + "account": format!("{base_url}account/"), + }, + }); + + let discovered_cs_api = match result { + Ok(response) => { + // Make sure we got a 2xx response + let status = response.status(); + if !status.is_success() { + warn!( + r#"⚠️ Matrix client well-known replied with {status}, expected 2xx. +Make sure the homeserver is reachable and the well-known document is available at "{well_known_uri}""#, + ); + } + + let result = response.json::().await; + + match result { + Ok(body) => { + if let Some(auth) = body.get("org.matrix.msc2965.authentication") { + if let Some(wk_issuer) = + auth.get("issuer").and_then(|issuer| issuer.as_str()) + { + if issuer == wk_issuer { + info!( + r#"✅ Matrix client well-known at "{well_known_uri}" is valid"# + ); + } else { + warn!( + r#"⚠️ Matrix client well-known has an "org.matrix.msc2965.authentication" section, but the issuer is not the same as the homeserver. +Check the well-known document at "{well_known_uri}" +This can happen because MAS parses the URL its config differently from the homeserver. +This means some OIDC-native clients might not work. +Make sure that the MAS config contains: + + http: + public_base: {issuer:?} + +And in the Synapse config: + + matrix_authentication_service: + enabled: true + # This must point to where MAS is reachable by Synapse + endpoint: {issuer:?} + # ... 
+ +See {DOCS_BASE}/setup/homeserver.html +"# + ); + } + } else { + error!( + r#"❌ Matrix client well-known "org.matrix.msc2965.authentication" does not have a valid "issuer" field. +Check the well-known document at "{well_known_uri}" +"# + ); + } + } else { + warn!( + r#"Matrix client well-known is missing the "org.matrix.msc2965.authentication" section. +Check the well-known document at "{well_known_uri}" +Make sure Synapse has delegated auth enabled: + + matrix_authentication_service: + enabled: true + endpoint: {issuer:?} + # ... + +If it is not Synapse handling the well-known document, update it to include the following: + +{expected_well_known:#} + +See {DOCS_BASE}/setup/homeserver.html +"# + ); + } + // Return the discovered homeserver base URL + body.get("m.homeserver") + .and_then(|hs| hs.get("base_url")) + .and_then(|base_url| base_url.as_str()) + .and_then(|base_url| Url::parse(base_url).ok()) + } + Err(e) => { + warn!( + r#"⚠️ Invalid JSON for the well-known document at "{well_known_uri}". +Make sure going to {well_known_uri:?} in a web browser returns a valid JSON document, similar to: + +{expected_well_known:#} + +See {DOCS_BASE}/setup/homeserver.html + +Error details: {e} +"# + ); + None + } + } + } + Err(e) => { + warn!( + r#"⚠️ Failed to fetch well-known document at "{well_known_uri}". +This means that the homeserver is not reachable, the well-known document is not available, or malformed. +Make sure your homeserver is running. 
+Make sure going to {well_known_uri:?} in a web browser returns a valid JSON document, similar to: + +{expected_well_known:#} + +See {DOCS_BASE}/setup/homeserver.html + +Error details: {e} +"# + ); + None + } + }; + + // Now try to reach the homeserver + let client_versions = hs_api.join("/_matrix/client/versions")?; + let result = http_client + .get(client_versions.as_str()) + .send_traced() + .await; + let can_reach_cs = match result { + Ok(response) => { + let status = response.status(); + if status.is_success() { + info!(r#"✅ Homeserver is reachable at "{client_versions}""#); + true + } else { + error!( + r#"❌Can't reach the homeserver at "{client_versions}", got {status}. +Make sure your homeserver is running. +This may be due to a misconfiguration in the `matrix` section of the config. + + matrix: + homeserver: "{matrix_domain}" + # The homeserver should be reachable at this URL + endpoint: "{hs_api}" + +See {DOCS_BASE}/setup/homeserver.html +"# + ); + false + } + } + Err(e) => { + error!( + r#"❌ Can't reach the homeserver at "{client_versions}". +This may be due to a misconfiguration in the `matrix` section of the config. + + matrix: + homeserver: "{matrix_domain}" + # The homeserver should be reachable at this URL + endpoint: "{hs_api}" + +See {DOCS_BASE}/setup/homeserver.html + +Error details: {e} +"# + ); + false + } + }; + + if can_reach_cs { + // Try the whoami API. 
If it replies with `M_UNKNOWN` this is because Synapse + // couldn't reach MAS + let whoami = hs_api.join("/_matrix/client/v3/account/whoami")?; + let result = http_client + .get(whoami.as_str()) + .bearer_auth("averyinvalidtokenireallyhopethisisnotvalid") + .send_traced() + .await; + match result { + Ok(response) => { + let status = response.status(); + let body = response.text().await.unwrap_or("???".into()); + + match status.as_u16() { + 401 => info!( + r#"✅ Homeserver at "{whoami}" is reachable, and it correctly rejected an invalid token."# + ), + + 0..=399 => error!( + r#"❌ The homeserver at "{whoami}" replied with {status}. +This is *highly* unexpected, as this means that a fake token might have been accepted. +"# + ), + + 503 => error!( + r#"❌ The homeserver at "{whoami}" replied with {status}. +This means probably means that the homeserver was unable to reach MAS to validate the token. +Make sure MAS is running and reachable from Synapse. +Check your homeserver logs. + +This is what the homeserver told us about the error: + + {body} + +See {DOCS_BASE}/setup/homeserver.html +"# + ), + + _ => warn!( + r#"⚠️ The homeserver at "{whoami}" replied with {status}. +Check that the homeserver is running."# + ), + } + } + Err(e) => error!( + r#"❌ Can't reach the homeserver at "{whoami}". + +Error details: {e} +"# + ), + } + + // Try to reach an authenticated MAS API endpoint + let mas_api = hs_api.join("/_synapse/mas/is_localpart_available")?; + let result = http_client + .get(mas_api.as_str()) + .bearer_auth(&secret) + .send_traced() + .await; + match result { + Ok(response) => { + let status = response.status(); + // We intentionally omit the required 'localpart' parameter + // in this request. If authentication is successful, Synapse + // returns a 400 Bad Request because of the missing + // parameter. If authentication fails, Synapse will return a + // 403 Forbidden. If the MAS integration isn't enabled, + // Synapse will return a 404 Not found. 
+ if status == StatusCode::BAD_REQUEST { + info!( + r#"✅ The Synapse MAS API is reachable with authentication at "{mas_api}"."# + ); + } else { + error!( + r#"❌ A Synapse MAS API endpoint at "{mas_api}" replied with {status}. +Make sure the homeserver is running, and that the MAS config has the correct `matrix.secret`. +It should match the `secret` set in the Synapse config. + + matrix_authentication_service: + enabled: true + endpoint: {issuer:?} + # This must exactly match the secret in the MAS config: + secret: {secret:?} + +And in the MAS config: + + matrix: + homeserver: "{matrix_domain}" + endpoint: "{hs_api}" + secret: {secret:?} +"# + ); + } + } + Err(e) => error!( + r#"❌ Can't reach the Synapse MAS API at "{mas_api}". +Make sure the homeserver is running, and that the MAS config has the correct `matrix.secret`. + +Error details: {e} +"# + ), + } + } + + let external_cs_api_endpoint = discovered_cs_api.as_ref().unwrap_or(&hs_api); + // Try to reach the legacy login API + let compat_login = external_cs_api_endpoint.join("/_matrix/client/v3/login")?; + let compat_login = compat_login.as_str(); + let result = http_client.get(compat_login).send_traced().await; + match result { + Ok(response) => { + let status = response.status(); + if status.is_success() { + // Now we need to inspect the body to figure out whether it's Synapse or MAS + // which handled the request + let body = response + .json::() + .await + .unwrap_or_default(); + let flows = body + .get("flows") + .and_then(|flows| flows.as_array()) + .map(std::vec::Vec::as_slice) + .unwrap_or_default(); + + let has_compatibility_sso = flows.iter().any(|flow| { + flow.get("type").and_then(|t| t.as_str()) == Some("m.login.sso") + && (flow + .get("oauth_aware_preferred") + .and_then(serde_json::Value::as_bool) + == Some(true) + // we check for the unstable name too: + || flow + .get("org.matrix.msc3824.delegated_oidc_compatibility") + .and_then(serde_json::Value::as_bool) + == Some(true)) + }); + + if 
has_compatibility_sso { + info!( + r#"✅ The legacy login API at "{compat_login}" is reachable and is handled by MAS."# + ); + } else { + warn!( + r#"⚠️ The legacy login API at "{compat_login}" is reachable, but it doesn't look to be handled by MAS. +This means legacy clients won't be able to login. +Make sure MAS is running. +Check your reverse proxy settings to make sure that this API is handled by MAS, not by Synapse. + +See {DOCS_BASE}/setup/reverse-proxy.html +"# + ); + } + } else { + error!( + r#"The legacy login API at "{compat_login}" replied with {status}. +This means legacy clients won't be able to login. +Make sure MAS is running. +Check your reverse proxy settings to make sure that this API is handled by MAS, not by Synapse. + +See {DOCS_BASE}/setup/reverse-proxy.html +"# + ); + } + } + Err(e) => warn!( + r#"⚠️ Can't reach the legacy login API at "{compat_login}". +This means legacy clients won't be able to login. +Make sure MAS is running. +Check your reverse proxy settings to make sure that this API is handled by MAS, not by Synapse. + +See {DOCS_BASE}/setup/reverse-proxy.html + +Error details: {e}"# + ), + } + + Ok(ExitCode::SUCCESS) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/manage.rs b/matrix-authentication-service/crates/cli/src/commands/manage.rs new file mode 100644 index 00000000..5bbd870e --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/manage.rs @@ -0,0 +1,1244 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::BTreeMap, process::ExitCode}; + +use anyhow::Context; +use chrono::Duration; +use clap::{ArgAction, CommandFactory, Parser}; +use console::{Alignment, Style, Term, pad_str, style}; +use dialoguer::{Confirm, FuzzySelect, Input, Password, theme::ColorfulTheme}; +use figment::Figment; +use mas_config::{ + ConfigurationSection, ConfigurationSectionExt, DatabaseConfig, MatrixConfig, PasswordsConfig, +}; +use mas_data_model::{Clock, Device, SystemClock, TokenType, Ulid, UpstreamOAuthProvider, User}; +use mas_email::Address; +use mas_matrix::HomeserverConnection; +use mas_storage::{ + Pagination, RepositoryAccess, + compat::{CompatAccessTokenRepository, CompatSessionFilter, CompatSessionRepository}, + oauth2::OAuth2SessionFilter, + queue::{ + DeactivateUserJob, ProvisionUserJob, QueueJobRepositoryExt as _, ReactivateUserJob, + SyncDevicesJob, + }, + user::{ + BrowserSessionFilter, UserEmailRepository, UserFilter, UserPasswordRepository, + UserRepository, + }, +}; +use mas_storage_pg::{DatabaseError, PgRepository}; +use rand::{ + RngCore, SeedableRng, + distributions::{Alphanumeric, DistString as _}, +}; +use sqlx::{Acquire, types::Uuid}; +use tracing::{error, info, info_span, warn}; +use zeroize::Zeroizing; + +use crate::util::{ + database_connection_from_config, homeserver_connection_from_config, + password_manager_from_config, +}; + +const USER_ATTRIBUTES_HEADING: &str = "User attributes"; + +#[derive(Debug, Clone)] +struct UpstreamProviderMapping { + upstream_provider_id: Ulid, + subject: String, +} + +fn parse_upstream_provider_mapping(s: &str) -> Result { + let (id, subject) = s.split_once(':').context("Invalid format")?; + let upstream_provider_id = id.parse().context("Invalid upstream provider ID")?; + let subject = subject.to_owned(); + + Ok(UpstreamProviderMapping { + upstream_provider_id, + subject, + }) +} + +#[derive(Parser, Debug)] +pub(super) struct Options { + #[command(subcommand)] + subcommand: Subcommand, +} + +#[derive(Parser, 
Debug)] +enum Subcommand { + /// Add an email address to the specified user + AddEmail { username: String, email: String }, + + /// (DEPRECATED) Mark email address as verified + VerifyEmail { username: String, email: String }, + + /// Set a user password + SetPassword { + username: String, + password: String, + /// Don't enforce that the password provided is above the minimum + /// configured complexity. + #[clap(long)] + ignore_complexity: bool, + }, + + /// Make a user admin + PromoteAdmin { username: String }, + + /// Make a user non-admin + DemoteAdmin { username: String }, + + /// List all users with admin privileges + ListAdminUsers, + + /// Issue a compatibility token + IssueCompatibilityToken { + /// User for which to issue the token + username: String, + + /// Device ID to set in the token. If not specified, a random device ID + /// will be generated. + device_id: Option, + + /// Whether that token should be admin + #[arg(long = "yes-i-want-to-grant-synapse-admin-privileges")] + admin: bool, + }, + + /// Create a new user registration token + IssueUserRegistrationToken { + /// Specific token string to use. If not provided, a random token will + /// be generated. + #[arg(long)] + token: Option, + + /// Maximum number of times this token can be used. + /// If not provided, the token can be used only once, unless the + /// `--unlimited` flag is set. + #[arg(long, group = "token-usage-limit")] + usage_limit: Option, + + /// Allow the token to be used an unlimited number of times. + #[arg(long, action = ArgAction::SetTrue, group = "token-usage-limit")] + unlimited: bool, + + /// Time in seconds after which the token expires. + /// If not provided, the token never expires. 
+ #[arg(long)] + expires_in: Option, + }, + + /// Trigger a provisioning job for all users + ProvisionAllUsers, + + /// Kill all sessions for a user + KillSessions { + /// User for which to kill sessions + username: String, + + /// Do a dry run + #[arg(long)] + dry_run: bool, + }, + + /// Lock a user + LockUser { + /// User to lock + username: String, + + /// Whether to deactivate the user + #[arg(long)] + deactivate: bool, + }, + + /// Unlock a user + UnlockUser { + /// User to unlock + username: String, + + /// Whether to reactivate the user if it had been deactivated + #[arg(long)] + reactivate: bool, + }, + + /// Register a user + /// + /// This will interactively prompt for the user's attributes unless the + /// `--yes` flag is set. It bypasses any policy check on the password, + /// email, etc. + RegisterUser { + /// Username to register + #[arg(help_heading = USER_ATTRIBUTES_HEADING, required_if_eq("yes", "true"))] + username: Option, + + /// Password to set + #[arg(short, long, help_heading = USER_ATTRIBUTES_HEADING)] + password: Option, + + /// Email to add + #[arg(short, long = "email", action = ArgAction::Append, help_heading = USER_ATTRIBUTES_HEADING)] + emails: Vec
, + + /// Upstream OAuth 2.0 provider mapping to add + #[arg( + short = 'm', + long = "upstream-provider-mapping", + value_parser = parse_upstream_provider_mapping, + action = ArgAction::Append, + value_name = "UPSTREAM_PROVIDER_ID:SUBJECT", + help_heading = USER_ATTRIBUTES_HEADING + )] + upstream_provider_mappings: Vec, + + /// Make the user an admin + #[arg(short, long, action = ArgAction::SetTrue, group = "admin-flag", help_heading = USER_ATTRIBUTES_HEADING)] + admin: bool, + + /// Make the user not an admin + #[arg(short = 'A', long, action = ArgAction::SetTrue, group = "admin-flag", help_heading = USER_ATTRIBUTES_HEADING)] + no_admin: bool, + + // Don't ask questions, just do it + #[arg(short, long, action = ArgAction::SetTrue)] + yes: bool, + + /// Set the user's display name + #[arg(short, long, help_heading = USER_ATTRIBUTES_HEADING)] + display_name: Option, + /// Don't enforce that the password provided is above the minimum + /// configured complexity. + #[clap(long)] + ignore_password_complexity: bool, + }, +} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + use Subcommand as SC; + let clock = SystemClock::default(); + // XXX: we should disallow SeedableRng::from_entropy + let mut rng = rand_chacha::ChaChaRng::from_entropy(); + + match self.subcommand { + SC::SetPassword { + username, + password, + ignore_complexity, + } => { + let _span = + info_span!("cli.manage.set_password", user.username = %username).entered(); + + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let passwords_config = PasswordsConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + + let mut conn = database_connection_from_config(&database_config).await?; + let password_manager = password_manager_from_config(&passwords_config).await?; + + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + let user = repo + .user() + 
.find_by_username(&username) + .await? + .context("User not found")?; + + if !ignore_complexity && !password_manager.is_password_complex_enough(&password)? { + error!("That password is too weak."); + return Ok(ExitCode::from(1)); + } + + let password = Zeroizing::new(password); + + let (version, hashed_password) = password_manager.hash(&mut rng, password).await?; + + repo.user_password() + .add(&mut rng, &clock, &user, version, hashed_password, None) + .await?; + + info!(%user.id, %user.username, "Password changed"); + repo.into_inner().commit().await?; + + Ok(ExitCode::SUCCESS) + } + + SC::AddEmail { username, email } => { + let _span = info_span!( + "cli.manage.add_email", + user.username = username, + user_email.email = email + ) + .entered(); + + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? + .context("User not found")?; + + // Find any existing email address + let existing_email = repo.user_email().find(&user, &email).await?; + let email = if let Some(email) = existing_email { + info!(%email.id, "Email already exists, makring as verified"); + email + } else { + repo.user_email() + .add(&mut rng, &clock, &user, email) + .await? + }; + + repo.into_inner().commit().await?; + info!( + %user.id, + %user.username, + %email.id, + %email.email, + "Email added" + ); + + Ok(ExitCode::SUCCESS) + } + + SC::VerifyEmail { username, email } => { + let _span = info_span!( + "cli.manage.verify_email", + user.username = username, + user_email.email = email + ) + .entered(); + + tracing::warn!( + "The 'verify-email' command is deprecated and will be removed in a future version. Use 'add-email' instead." 
+ ); + + Ok(ExitCode::SUCCESS) + } + + SC::PromoteAdmin { username } => { + let _span = + info_span!("cli.manage.promote_admin", user.username = username,).entered(); + + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? + .context("User not found")?; + + let user = repo.user().set_can_request_admin(user, true).await?; + + repo.into_inner().commit().await?; + info!(%user.id, %user.username, "User promoted to admin"); + + Ok(ExitCode::SUCCESS) + } + + SC::DemoteAdmin { username } => { + let _span = + info_span!("cli.manage.demote_admin", user.username = username,).entered(); + + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? 
+ .context("User not found")?; + + let user = repo.user().set_can_request_admin(user, false).await?; + + repo.into_inner().commit().await?; + info!(%user.id, %user.username, "User is no longer admin"); + + Ok(ExitCode::SUCCESS) + } + + SC::ListAdminUsers => { + let _span = info_span!("cli.manage.list_admins").entered(); + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let mut cursor = Pagination::first(1000); + let filter = UserFilter::new().can_request_admin_only(); + let total = repo.user().count(filter).await?; + + info!("The following users can request admin privileges ({total} total):"); + loop { + let page = repo.user().list(filter, cursor).await?; + for edge in page.edges { + let user = edge.node; + info!(%user.id, username = %user.username); + cursor = cursor.after(edge.cursor); + } + + if !page.has_next_page { + break; + } + } + + Ok(ExitCode::SUCCESS) + } + + SC::IssueCompatibilityToken { + username, + admin, + device_id, + } => { + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let matrix_config = + MatrixConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + let http_client = mas_http::reqwest_client(); + let homeserver = + homeserver_connection_from_config(&matrix_config, http_client).await?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? 
+ .context("User not found")?; + + let device = if let Some(device_id) = device_id { + device_id.into() + } else { + Device::generate(&mut rng) + }; + + if let Err(e) = homeserver + .upsert_device(&user.username, device.as_str(), None) + .await + { + error!( + error = &*e, + "Could not create the device on the homeserver, aborting" + ); + + // Schedule a device sync job to remove the potential leftover device + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + + repo.into_inner().commit().await?; + return Ok(ExitCode::FAILURE); + } + + let compat_session = repo + .compat_session() + .add(&mut rng, &clock, &user, device, None, admin, None) + .await?; + + let token = TokenType::CompatAccessToken.generate(&mut rng); + + let compat_access_token = repo + .compat_access_token() + .add(&mut rng, &clock, &compat_session, token, None) + .await?; + + repo.into_inner().commit().await?; + + info!( + %compat_access_token.id, + %compat_session.id, + compat_session.device = compat_session.device.map(tracing::field::display), + %user.id, + %user.username, + "Compatibility token issued: {}", compat_access_token.token + ); + + Ok(ExitCode::SUCCESS) + } + + SC::IssueUserRegistrationToken { + token, + usage_limit, + unlimited, + expires_in, + } => { + let _span = info_span!("cli.manage.add_user_registration_token").entered(); + + let usage_limit = match (usage_limit, unlimited) { + (Some(usage_limit), false) => Some(usage_limit), + (None, false) => Some(1), + (None, true) => None, + (Some(_), true) => unreachable!(), // This should be handled by the clap group + }; + + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + // Calculate expiration time if provided + let expires_at = + expires_in.map(|seconds| clock.now() + 
Duration::seconds(seconds.into())); + + // Generate a token if not provided + let token_str = token.unwrap_or_else(|| Alphanumeric.sample_string(&mut rng, 12)); + + // Create the token + let registration_token = repo + .user_registration_token() + .add(&mut rng, &clock, token_str, usage_limit, expires_at) + .await?; + + repo.into_inner().commit().await?; + + info!(%registration_token.id, "Created user registration token: {}", registration_token.token); + + Ok(ExitCode::SUCCESS) + } + + SC::ProvisionAllUsers => { + let _span = info_span!("cli.manage.provision_all_users").entered(); + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let mut txn = conn.begin().await?; + + // TODO: do some pagination here + let ids: Vec = sqlx::query_scalar("SELECT user_id FROM users") + .fetch_all(&mut *txn) + .await?; + + let mut repo = PgRepository::from_conn(txn); + + for id in ids { + let id = id.into(); + info!(user.id = %id, "Scheduling provisioning job"); + let job = ProvisionUserJob::new_for_id(id); + repo.queue_job().schedule_job(&mut rng, &clock, job).await?; + } + + repo.into_inner().commit().await?; + + Ok(ExitCode::SUCCESS) + } + + SC::KillSessions { username, dry_run } => { + let _span = + info_span!("cli.manage.kill_sessions", user.username = username).entered(); + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? + .context("User not found")?; + + let filter = CompatSessionFilter::new().for_user(&user).active_only(); + let affected = if dry_run { + repo.compat_session().count(filter).await? + } else { + repo.compat_session().finish_bulk(&clock, filter).await? 
+ }; + + match affected { + 0 => info!("No active compatibility sessions to end"), + 1 => info!("Ended 1 active compatibility session"), + _ => info!("Ended {affected} active compatibility sessions"), + } + + let filter = OAuth2SessionFilter::new().for_user(&user).active_only(); + let affected = if dry_run { + repo.oauth2_session().count(filter).await? + } else { + repo.oauth2_session().finish_bulk(&clock, filter).await? + }; + + match affected { + 0 => info!("No active compatibility sessions to end"), + 1 => info!("Ended 1 active OAuth 2.0 session"), + _ => info!("Ended {affected} active OAuth 2.0 sessions"), + } + + let filter = BrowserSessionFilter::new().for_user(&user).active_only(); + let affected = if dry_run { + repo.browser_session().count(filter).await? + } else { + repo.browser_session().finish_bulk(&clock, filter).await? + }; + + match affected { + 0 => info!("No active browser sessions to end"), + 1 => info!("Ended 1 active browser session"), + _ => info!("Ended {affected} active browser sessions"), + } + + // Schedule a job to sync the devices of the user with the homeserver + warn!("Scheduling job to sync devices for the user"); + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + + let txn = repo.into_inner(); + if dry_run { + info!("Dry run, not saving"); + txn.rollback().await?; + } else { + txn.commit().await?; + } + + Ok(ExitCode::SUCCESS) + } + + SC::LockUser { + username, + deactivate, + } => { + let _span = info_span!("cli.manage.lock_user", user.username = username).entered(); + let config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? 
+ .context("User not found")?; + + info!(%user.id, "Locking user"); + + // Even though the deactivation job will lock the user, we lock it here in case + // the worker is not running, as we don't have a good way to run a job + // synchronously yet. + let user = repo.user().lock(&clock, user).await?; + + if deactivate { + warn!(%user.id, "Scheduling user deactivation"); + repo.queue_job() + .schedule_job(&mut rng, &clock, DeactivateUserJob::new(&user, false)) + .await?; + } + + repo.into_inner().commit().await?; + + Ok(ExitCode::SUCCESS) + } + + SC::UnlockUser { + username, + reactivate, + } => { + let _span = + info_span!("cli.manage.unlock_user", user.username = username).entered(); + let config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let mut conn = database_connection_from_config(&config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + let user = repo + .user() + .find_by_username(&username) + .await? 
+ .context("User not found")?; + + if reactivate { + warn!(%user.id, "Scheduling user reactivation"); + repo.queue_job() + .schedule_job(&mut rng, &clock, ReactivateUserJob::new(&user)) + .await?; + } else { + repo.user().unlock(user).await?; + } + + repo.into_inner().commit().await?; + + Ok(ExitCode::SUCCESS) + } + + SC::RegisterUser { + username, + password, + emails, + upstream_provider_mappings, + admin, + no_admin, + display_name, + yes, + ignore_password_complexity, + } => { + let http_client = mas_http::reqwest_client(); + let password_config = PasswordsConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let database_config = DatabaseConfig::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + let matrix_config = + MatrixConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + + let password_manager = password_manager_from_config(&password_config).await?; + let homeserver = + homeserver_connection_from_config(&matrix_config, http_client).await?; + let mut conn = database_connection_from_config(&database_config).await?; + let txn = conn.begin().await?; + let mut repo = PgRepository::from_conn(txn); + + if let Some(password) = &password + && !ignore_password_complexity + && !password_manager.is_password_complex_enough(password)? + { + error!("That password is too weak."); + return Ok(ExitCode::from(1)); + } + + // If the username is provided, check if it's available and normalize it. + let localpart = if let Some(username) = username { + check_and_normalize_username(&username, &mut repo, &homeserver) + .await? + .to_owned() + } else { + // Else we prompt for one until we get a valid one. 
+ loop { + let username = tokio::task::spawn_blocking(|| { + Input::::with_theme(&ColorfulTheme::default()) + .with_prompt("Username") + .interact_text() + }) + .await??; + + match check_and_normalize_username(&username, &mut repo, &homeserver).await + { + Ok(localpart) => break localpart.to_owned(), + Err(e) => { + warn!("Invalid username: {e}"); + } + } + } + }; + + // Load all the upstream providers + let upstream_providers: BTreeMap<_, _> = repo + .upstream_oauth_provider() + .all_enabled() + .await? + .into_iter() + .map(|provider| (provider.id, provider)) + .collect(); + + let upstream_provider_mappings = upstream_provider_mappings + .into_iter() + .map(|mapping| { + ( + &upstream_providers[&mapping.upstream_provider_id], + mapping.subject, + ) + }) + .collect(); + + let admin = match (admin, no_admin) { + (false, false) => None, + (true, false) => Some(true), + (false, true) => Some(false), + _ => unreachable!("This should be handled by the clap group"), + }; + + // Hash the password if it's provided + let hashed_password = if let Some(password) = password { + let password = Zeroizing::new(password); + Some(password_manager.hash(&mut rng, password).await?) + } else { + None + }; + + let mut req = UserCreationRequest { + username: localpart, + hashed_password, + emails, + upstream_provider_mappings, + display_name, + admin, + }; + + let term = Term::buffered_stdout(); + loop { + req.show(&term, &homeserver)?; + + // If we're in `yes` mode, we don't prompt for actions + if yes { + break; + } + + term.write_line(&format!( + "\n{msg}:\n\n {cmd}\n", + msg = style("Non-interactive equivalent to create this user").bold(), + cmd = style(UserCreationCommand(&req)).underlined(), + ))?; + + term.flush()?; + + let action = req + .prompt_action( + password_manager.is_enabled(), + !upstream_providers.is_empty(), + ) + .await? 
+ .context("Aborted")?; + + match action { + Action::CreateUser => break, + Action::ChangeUsername => { + req.username = loop { + let current_username = req.username.clone(); + let username = tokio::task::spawn_blocking(|| { + Input::::with_theme(&ColorfulTheme::default()) + .with_prompt("Username") + .with_initial_text(current_username) + .interact_text() + }) + .await??; + + match check_and_normalize_username( + &username, + &mut repo, + &homeserver, + ) + .await + { + Ok(localpart) => break localpart.to_owned(), + Err(e) => { + warn!("Invalid username: {e}"); + } + } + }; + } + Action::SetPassword => { + let password = tokio::task::spawn_blocking(|| { + Password::with_theme(&ColorfulTheme::default()) + .with_prompt("Password") + .with_confirmation("Confirm password", "Passwords mismatching") + .interact() + }) + .await??; + let password = Zeroizing::new(password); + req.hashed_password = + Some(password_manager.hash(&mut rng, password).await?); + } + Action::SetDisplayName => { + let display_name = tokio::task::spawn_blocking(|| { + Input::::with_theme(&ColorfulTheme::default()) + .with_prompt("Display name") + .interact() + }) + .await??; + req.display_name = Some(display_name); + } + Action::AddEmail => { + let email = tokio::task::spawn_blocking(|| { + Input::
::with_theme(&ColorfulTheme::default()) + .with_prompt("Email") + .interact_text() + }) + .await??; + req.emails.push(email); + } + Action::SetAdmin => { + let admin = tokio::task::spawn_blocking(|| { + Confirm::with_theme(&ColorfulTheme::default()) + .with_prompt("Make user admin?") + .interact() + }) + .await??; + req.admin = Some(admin); + } + Action::AddUpstreamProviderMapping => { + let providers = upstream_providers.clone(); + let provider_id = tokio::task::spawn_blocking(move || { + let providers: Vec<_> = providers.into_values().collect(); + let human_readable_providers: Vec<_> = + providers.iter().map(HumanReadable).collect(); + FuzzySelect::with_theme(&ColorfulTheme::default()) + .with_prompt("Upstream provider") + .items(&human_readable_providers) + .default(0) + .interact() + .map(move |selected| providers[selected].id) + }) + .await??; + let provider = &upstream_providers[&provider_id]; + + let subject = tokio::task::spawn_blocking(|| { + Input::::with_theme(&ColorfulTheme::default()) + .with_prompt("Subject") + .interact() + }) + .await??; + + req.upstream_provider_mappings.push((provider, subject)); + } + } + } + + if req.emails.is_empty() { + warn!("No email address provided, user will need to add one"); + } + + let confirmation = if yes { + true + } else { + tokio::task::spawn_blocking(|| { + Confirm::with_theme(&ColorfulTheme::default()) + .with_prompt("Confirm?") + .interact() + }) + .await?? 
+ }; + + if confirmation { + let user = req.do_register(&mut repo, &mut rng, &clock).await?; + repo.into_inner().commit().await?; + info!(%user.id, "User registered"); + } else { + warn!("Aborted"); + } + + Ok(ExitCode::SUCCESS) + } + } + } +} + +/// A wrapper to display some objects differently +#[derive(Debug, Clone, Copy)] +struct HumanReadable(T); + +impl std::fmt::Display for HumanReadable<&UpstreamOAuthProvider> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let provider = self.0; + if let Some(human_name) = &provider.human_name { + write!(f, "{} ({})", human_name, provider.id) + } else if let Some(issuer) = &provider.issuer { + write!(f, "{} ({})", issuer, provider.id) + } else { + write!(f, "{}", provider.id) + } + } +} + +async fn check_and_normalize_username<'a>( + localpart_or_mxid: &'a str, + repo: &mut dyn RepositoryAccess, + homeserver: &dyn HomeserverConnection, +) -> anyhow::Result<&'a str> { + // XXX: this is a very basic MXID to localpart conversion + // Strip any leading '@' + let mut localpart = localpart_or_mxid.trim_start_matches('@'); + + // Strip any trailing ':homeserver' + if let Some(index) = localpart.find(':') { + localpart = &localpart[..index]; + } + + if localpart.is_empty() { + return Err(anyhow::anyhow!("Username cannot be empty")); + } + + if repo.user().exists(localpart).await? { + return Err(anyhow::anyhow!("User already exists")); + } + + if !homeserver.is_localpart_available(localpart).await? { + return Err(anyhow::anyhow!("Username not available on homeserver")); + } + + Ok(localpart) +} + +struct UserCreationRequest<'a> { + username: String, + hashed_password: Option<(u16, String)>, + emails: Vec
, + upstream_provider_mappings: Vec<(&'a UpstreamOAuthProvider, String)>, + display_name: Option, + admin: Option, +} + +impl UserCreationRequest<'_> { + // Get a list of the possible actions + fn possible_actions( + &self, + has_password_auth: bool, + has_upstream_providers: bool, + ) -> Vec { + let mut actions = vec![Action::CreateUser, Action::ChangeUsername, Action::AddEmail]; + + if has_password_auth && self.hashed_password.is_none() { + actions.push(Action::SetPassword); + } + + if has_upstream_providers { + actions.push(Action::AddUpstreamProviderMapping); + } + + if self.admin.is_none() { + actions.push(Action::SetAdmin); + } + + if self.display_name.is_none() { + actions.push(Action::SetDisplayName); + } + + actions + } + + /// Prompt for the next action + async fn prompt_action( + &self, + has_password_auth: bool, + has_upstream_providers: bool, + ) -> anyhow::Result> { + let actions = self.possible_actions(has_password_auth, has_upstream_providers); + tokio::task::spawn_blocking(move || { + let index = FuzzySelect::with_theme(&ColorfulTheme::default()) + .with_prompt("What do you want to do next? ( to abort)") + .items(&actions) + .default(0) + .interact_opt()?; + Ok(index.map(|index| actions[index])) + }) + .await? + } + + /// Show the user creation request in a human-readable format + fn show(&self, term: &Term, homeserver: &dyn HomeserverConnection) -> std::io::Result<()> { + let value_style = Style::new().green(); + let key_style = Style::new().bold(); + let warning_style = Style::new().italic().red().bright(); + let username = &self.username; + let mxid = homeserver.mxid(username); + + term.write_line(&style("User attributes").bold().underlined().to_string())?; + + macro_rules! 
display { + ($key:expr, $value:expr) => { + term.write_line(&format!( + "{key}: {value}", + key = key_style.apply_to(pad_str($key, 17, Alignment::Right, None)), + value = value_style.apply_to($value) + ))?; + }; + } + + display!("Username", username); + display!("Matrix ID", mxid); + if let Some(display_name) = &self.display_name { + display!("Display name", display_name); + } + + if self.hashed_password.is_some() { + display!("Password", "********"); + } + + for (provider, subject) in &self.upstream_provider_mappings { + let provider = HumanReadable(*provider); + display!("Upstream account", format!("{provider} : {subject:?}")); + } + + for email in &self.emails { + display!("Email", email); + } + + if self.emails.is_empty() { + term.write_line( + &warning_style + .apply_to("No email address provided, user will be prompted to add one") + .to_string(), + )?; + } + + if self.hashed_password.is_none() && self.upstream_provider_mappings.is_empty() { + term.write_line( + &warning_style.apply_to("No password or upstream provider mapping provided, user will not be able to log in") + .to_string(), + )?; + } + + if let Some(admin) = self.admin { + display!("Can request admin", admin); + } + + term.flush()?; + + Ok(()) + } + + /// Submit the user creation request + async fn do_register( + self, + repo: &mut dyn RepositoryAccess, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + ) -> Result { + let Self { + username, + hashed_password, + emails, + upstream_provider_mappings, + display_name, + admin, + } = self; + let mut user = repo.user().add(rng, clock, username).await?; + + if let Some((version, hashed_password)) = hashed_password { + repo.user_password() + .add(rng, clock, &user, version, hashed_password, None) + .await?; + } + + for email in emails { + repo.user_email() + .add(rng, clock, &user, email.to_string()) + .await?; + } + + for (provider, subject) in upstream_provider_mappings { + // Note that we don't pass a human_account_name here, as we don't ask for 
it + let link = repo + .upstream_oauth_link() + .add(rng, clock, provider, subject, None) + .await?; + + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await?; + } + + if let Some(admin) = admin { + user = repo.user().set_can_request_admin(user, admin).await?; + } + + let mut provision_job = ProvisionUserJob::new(&user); + if let Some(display_name) = display_name { + provision_job = provision_job.set_display_name(display_name); + } + + repo.queue_job() + .schedule_job(rng, clock, provision_job) + .await?; + + Ok(user) + } +} + +#[derive(Debug, Clone, Copy)] +enum Action { + CreateUser, + ChangeUsername, + SetPassword, + SetDisplayName, + AddEmail, + SetAdmin, + AddUpstreamProviderMapping, +} + +impl std::fmt::Display for Action { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Action::CreateUser => write!(f, "Create the user"), + Action::ChangeUsername => write!(f, "Change the username"), + Action::SetPassword => write!(f, "Set a password"), + Action::AddEmail => write!(f, "Add email"), + Action::SetDisplayName => write!(f, "Set a display name"), + Action::SetAdmin => write!(f, "Set the admin status"), + Action::AddUpstreamProviderMapping => write!(f, "Add upstream provider mapping"), + } + } +} + +/// A wrapper to display the user creation request as a command +struct UserCreationCommand<'a>(&'a UserCreationRequest<'a>); + +impl std::fmt::Display for UserCreationCommand<'_> { + fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let command = super::Options::command(); + let manage = command.find_subcommand("manage").unwrap(); + let register_user = manage.find_subcommand("register-user").unwrap(); + let yes_arg = ®ister_user[&clap::Id::from("yes")]; + let password_arg = ®ister_user[&clap::Id::from("password")]; + let email_arg = ®ister_user[&clap::Id::from("emails")]; + let upstream_provider_mapping_arg = + ®ister_user[&clap::Id::from("upstream_provider_mappings")]; + let display_name_arg 
= ®ister_user[&clap::Id::from("display_name")]; + let admin_arg = ®ister_user[&clap::Id::from("admin")]; + let no_admin_arg = ®ister_user[&clap::Id::from("no_admin")]; + + write!( + w, + "{} {} {} --{} {}", + command.get_name(), + manage.get_name(), + register_user.get_name(), + yes_arg.get_long().unwrap(), + self.0.username, + )?; + + for email in &self.0.emails { + let email: &str = email.as_ref(); + write!(w, " --{} {email:?}", email_arg.get_long().unwrap())?; + } + + if let Some(display_name) = &self.0.display_name { + write!( + w, + " --{} {:?}", + display_name_arg.get_long().unwrap(), + display_name + )?; + } + + if self.0.hashed_password.is_some() { + write!(w, " --{} $PASSWORD", password_arg.get_long().unwrap())?; + } + + for (provider, subject) in &self.0.upstream_provider_mappings { + let mapping = format!("{}:{}", provider.id, subject); + write!( + w, + " --{} {mapping:?}", + upstream_provider_mapping_arg.get_long().unwrap(), + )?; + } + + match self.0.admin { + Some(true) => write!(w, " --{}", admin_arg.get_long().unwrap())?, + Some(false) => write!(w, " --{}", no_admin_arg.get_long().unwrap())?, + None => {} + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/mod.rs b/matrix-authentication-service/crates/cli/src/commands/mod.rs new file mode 100644 index 00000000..e8cef3b1 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/mod.rs @@ -0,0 +1,109 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::process::ExitCode; + +use camino::Utf8PathBuf; +use clap::Parser; +use figment::{ + Figment, + providers::{Env, Format, Yaml}, +}; + +mod config; +mod database; +mod debug; +mod doctor; +mod manage; +mod server; +mod syn2mas; +mod templates; +mod worker; + +#[derive(Parser, Debug)] +enum Subcommand { + /// Configuration-related commands + Config(self::config::Options), + + /// Manage the database + Database(self::database::Options), + + /// Runs the web server + Server(self::server::Options), + + /// Run the worker + Worker(self::worker::Options), + + /// Manage the instance + Manage(self::manage::Options), + + /// Templates-related commands + Templates(self::templates::Options), + + /// Debug utilities + #[clap(hide = true)] + Debug(self::debug::Options), + + /// Run diagnostics on the deployment + Doctor(self::doctor::Options), + + /// Migrate from Synapse's built-in auth system to MAS. + #[clap(name = "syn2mas")] + // Box<> is to work around a 'large size difference between variants' lint + Syn2Mas(Box), +} + +#[derive(Parser, Debug)] +#[command(version = crate::VERSION)] +pub struct Options { + /// Path to the configuration file + #[arg(short, long, global = true, action = clap::ArgAction::Append)] + config: Vec, + + #[command(subcommand)] + subcommand: Option, +} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + use Subcommand as S; + // We Box the futures for each subcommand so that we avoid this function being + // big on the stack all the time + match self.subcommand { + Some(S::Config(c)) => Box::pin(c.run(figment)).await, + Some(S::Database(c)) => Box::pin(c.run(figment)).await, + Some(S::Server(c)) => Box::pin(c.run(figment)).await, + Some(S::Worker(c)) => Box::pin(c.run(figment)).await, + Some(S::Manage(c)) => Box::pin(c.run(figment)).await, + Some(S::Templates(c)) => Box::pin(c.run(figment)).await, + Some(S::Debug(c)) => Box::pin(c.run(figment)).await, + Some(S::Doctor(c)) => Box::pin(c.run(figment)).await, 
+ Some(S::Syn2Mas(c)) => Box::pin(c.run(figment)).await, + None => Box::pin(self::server::Options::default().run(figment)).await, + } + } + + /// Get a [`Figment`] instance with the configuration loaded + pub fn figment(&self) -> Figment { + let configs = if self.config.is_empty() { + // Read the MAS_CONFIG environment variable + std::env::var("MAS_CONFIG") + // Default to "config.yaml" + .unwrap_or_else(|_| "config.yaml".to_owned()) + // Split the file list on `:` + .split(':') + .map(Utf8PathBuf::from) + .collect() + } else { + self.config.clone() + }; + let base = Figment::new().merge(Env::prefixed("MAS_").split("_")); + + configs + .into_iter() + .fold(base, |f, path| f.admerge(Yaml::file(path))) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/server.rs b/matrix-authentication-service/crates/cli/src/commands/server.rs new file mode 100644 index 00000000..b72d4811 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/server.rs @@ -0,0 +1,341 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{process::ExitCode, sync::Arc, time::Duration}; + +use anyhow::Context; +use clap::Parser; +use figment::Figment; +use itertools::Itertools; +use mas_config::{ + AppConfig, ClientsConfig, ConfigurationSection, ConfigurationSectionExt, UpstreamOAuth2Config, +}; +use mas_context::LogContext; +use mas_data_model::SystemClock; +use mas_handlers::{ActivityTracker, CookieManager, Limiter, MetadataCache}; +use mas_listener::server::Server; +use mas_router::UrlBuilder; +use mas_storage_pg::PgRepositoryFactory; +use tracing::{info, info_span, warn}; + +use crate::{ + app_state::AppState, + lifecycle::LifecycleManager, + util::{ + database_pool_from_config, homeserver_connection_from_config, + load_policy_factory_dynamic_data_continuously, mailer_from_config, + password_manager_from_config, policy_factory_from_config, site_config_from_config, + templates_from_config, test_mailer_in_background, + }, +}; + +#[allow(clippy::struct_excessive_bools)] +#[derive(Parser, Debug, Default)] +pub(super) struct Options { + /// Do not apply pending database migrations on start + #[arg(long)] + no_migrate: bool, + + /// DEPRECATED: default is to apply pending migrations, use `--no-migrate` + /// to disable + #[arg(long, hide = true)] + migrate: bool, + + /// Do not start the task worker + #[arg(long)] + no_worker: bool, + + /// Do not sync the configuration with the database + #[arg(long)] + no_sync: bool, +} + +impl Options { + pub async fn run(self, figment: &Figment) -> anyhow::Result { + let span = info_span!("cli.run.init").entered(); + let mut shutdown = LifecycleManager::new()?; + let config = AppConfig::extract(figment).map_err(anyhow::Error::from_boxed)?; + + info!(version = crate::VERSION, "Starting up"); + + if self.migrate { + warn!( + "The `--migrate` flag is deprecated and will be removed in a future release. Please use `--no-migrate` to disable automatic migrations on startup." 
+ ); + } + + // Connect to the database + info!("Connecting to the database"); + let pool = database_pool_from_config(&config.database).await?; + + if self.no_migrate { + let mut conn = pool.acquire().await?; + let pending_migrations = mas_storage_pg::pending_migrations(&mut conn).await?; + if !pending_migrations.is_empty() { + // Refuse to start if there are pending migrations + return Err(anyhow::anyhow!( + "The server is running with `--no-migrate` but there are pending migrations. Please run them first with `mas-cli database migrate`, or omit the `--no-migrate` flag to apply them automatically on startup." + )); + } + } else { + info!("Running pending database migrations"); + let mut conn = pool.acquire().await?; + mas_storage_pg::migrate(&mut conn) + .await + .context("could not run migrations")?; + } + + let encrypter = config.secrets.encrypter().await?; + + if self.no_sync { + info!("Skipping configuration sync"); + } else { + // Sync the configuration with the database + let mut conn = pool.acquire().await?; + let clients_config = + ClientsConfig::extract_or_default(figment).map_err(anyhow::Error::from_boxed)?; + let upstream_oauth2_config = UpstreamOAuth2Config::extract_or_default(figment) + .map_err(anyhow::Error::from_boxed)?; + + crate::sync::config_sync( + upstream_oauth2_config, + clients_config, + &mut conn, + &encrypter, + &SystemClock::default(), + false, + false, + ) + .await + .context("could not sync the configuration with the database")?; + } + + // Initialize the key store + let key_store = config + .secrets + .key_store() + .await + .context("could not import keys from config")?; + + let cookie_manager = CookieManager::derive_from( + config.http.public_base.clone(), + &config.secrets.encryption().await?, + ); + + // Load and compile the WASM policies (and fallback to the default embedded one) + info!("Loading and compiling the policy module"); + let policy_factory = + policy_factory_from_config(&config.policy, &config.matrix, 
&config.experimental) + .await?; + let policy_factory = Arc::new(policy_factory); + + load_policy_factory_dynamic_data_continuously( + &policy_factory, + PgRepositoryFactory::new(pool.clone()).boxed(), + shutdown.soft_shutdown_token(), + shutdown.task_tracker(), + ) + .await?; + + let url_builder = UrlBuilder::new( + config.http.public_base.clone(), + config.http.issuer.clone(), + None, + ); + + // Load the site configuration + let site_config = site_config_from_config( + &config.branding, + &config.matrix, + &config.experimental, + &config.passwords, + &config.account, + &config.captcha, + )?; + + // Load and compile the templates + let templates = templates_from_config( + &config.templates, + &site_config, + &url_builder, + // Don't use strict mode in production yet + false, + // Don't stabilise in production + false, + ) + .await?; + shutdown.register_reloadable(&templates); + + let http_client = mas_http::reqwest_client(); + + let homeserver_connection = + homeserver_connection_from_config(&config.matrix, http_client.clone()).await?; + + if !self.no_worker { + let mailer = mailer_from_config(&config.email, &templates)?; + test_mailer_in_background(&mailer, Duration::from_secs(30)); + + info!("Starting task worker"); + mas_tasks::init_and_run( + PgRepositoryFactory::new(pool.clone()), + SystemClock::default(), + &mailer, + homeserver_connection.clone(), + url_builder.clone(), + &site_config, + shutdown.soft_shutdown_token(), + shutdown.task_tracker(), + ) + .await?; + } + + let listeners_config = config.http.listeners.clone(); + + let password_manager = password_manager_from_config(&config.passwords).await?; + + // The upstream OIDC metadata cache + let metadata_cache = MetadataCache::new(); + + // Initialize the activity tracker + // Activity is flushed every minute + let activity_tracker = ActivityTracker::new( + PgRepositoryFactory::new(pool.clone()).boxed(), + Duration::from_secs(60), + shutdown.task_tracker(), + shutdown.soft_shutdown_token(), + ); + + 
shutdown.register_reloadable(&activity_tracker); + + let trusted_proxies = config.http.trusted_proxies.clone(); + + // Build a rate limiter. + // This should not raise an error here as the config should already have been + // validated. + let limiter = Limiter::new(&config.rate_limiting) + .context("rate-limiting configuration is not valid")?; + + // Explicitly the config to properly zeroize secret keys + drop(config); + + limiter.start(); + + let graphql_schema = mas_handlers::graphql_schema( + PgRepositoryFactory::new(pool.clone()).boxed(), + &policy_factory, + homeserver_connection.clone(), + site_config.clone(), + password_manager.clone(), + url_builder.clone(), + limiter.clone(), + ); + + let state = { + let mut s = AppState { + repository_factory: PgRepositoryFactory::new(pool), + templates, + key_store, + cookie_manager, + encrypter, + url_builder, + homeserver_connection, + policy_factory, + graphql_schema, + http_client, + password_manager, + metadata_cache, + site_config, + activity_tracker, + trusted_proxies, + limiter, + }; + s.init_metrics(); + s.init_metadata_cache(); + s + }; + + let mut fd_manager = listenfd::ListenFd::from_env(); + + let servers: Vec> = listeners_config + .into_iter() + .map(|config| { + // Let's first grab all the listeners + let listeners = crate::server::build_listeners(&mut fd_manager, &config.binds)?; + + // Load the TLS config + let tls_config = if let Some(tls_config) = config.tls.as_ref() { + let tls_config = crate::server::build_tls_server_config(tls_config)?; + Some(Arc::new(tls_config)) + } else { + None + }; + + // and build the router + let router = crate::server::build_router( + state.clone(), + &config.resources, + config.prefix.as_deref(), + config.name.as_deref(), + ); + + + // Display some informations about where we'll be serving connections + let proto = if config.tls.is_some() { "https" } else { "http" }; + let prefix = config.prefix.unwrap_or_default(); + let addresses= listeners + .iter() + .map(|listener| { 
+ if let Ok(addr) = listener.local_addr() { + format!("{proto}://{addr:?}{prefix}") + } else { + warn!("Could not get local address for listener, something might be wrong!"); + format!("{proto}://???{prefix}") + } + }) + .join(", "); + + let additional = if config.proxy_protocol { + "(with Proxy Protocol)" + } else { + "" + }; + + info!( + "Listening on {addresses} with resources {resources:?} {additional}", + resources = &config.resources + ); + + anyhow::Ok(listeners.into_iter().map(move |listener| { + let mut server = Server::new(listener, router.clone()); + if let Some(tls_config) = &tls_config { + server = server.with_tls(tls_config.clone()); + } + if config.proxy_protocol { + server = server.with_proxy(); + } + server + })) + }) + .flatten_ok() + .collect::, _>>()?; + + span.exit(); + + shutdown + .task_tracker() + .spawn(LogContext::new("run-servers").run(|| { + mas_listener::server::run_servers( + servers, + shutdown.soft_shutdown_token(), + shutdown.hard_shutdown_token(), + ) + })); + + let exit_code = shutdown.run().await; + + Ok(exit_code) + } +} diff --git a/matrix-authentication-service/crates/cli/src/commands/syn2mas.rs b/matrix-authentication-service/crates/cli/src/commands/syn2mas.rs new file mode 100644 index 00000000..4ea55a21 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/commands/syn2mas.rs @@ -0,0 +1,319 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+
+//! Implementation of the `syn2mas` CLI subcommands, which check for and
+//! perform a migration of user data from a Synapse homeserver database
+//! into the MAS database.
+
+use std::{collections::HashMap, process::ExitCode, time::Duration};
+
+use anyhow::Context;
+use camino::Utf8PathBuf;
+use clap::Parser;
+use figment::Figment;
+use mas_config::{
+    ConfigurationSection, ConfigurationSectionExt, DatabaseConfig, MatrixConfig, SyncConfig,
+    UpstreamOAuth2Config,
+};
+use mas_data_model::SystemClock;
+use rand::thread_rng;
+use sqlx::{Connection, Either, PgConnection, postgres::PgConnectOptions, types::Uuid};
+use syn2mas::{
+    LockedMasDatabase, MasWriter, Progress, ProgressStage, SynapseReader, synapse_config,
+};
+use tracing::{Instrument, error, info};
+
+use crate::util::{DatabaseConnectOptions, database_connection_from_config_with_options};
+
+/// The exit code used by `syn2mas check` and `syn2mas migrate` when there are
+/// errors preventing migration.
+const EXIT_CODE_CHECK_ERRORS: u8 = 10;
+
+/// The exit code used by `syn2mas check` when there are warnings which should
+/// be considered prior to migration.
+const EXIT_CODE_CHECK_WARNINGS: u8 = 11;
+
+/// Command-line options shared by all `syn2mas` subcommands.
+#[derive(Parser, Debug)]
+pub(super) struct Options {
+    #[command(subcommand)]
+    subcommand: Subcommand,
+
+    /// Path to the Synapse configuration (in YAML format).
+    /// May be specified multiple times if multiple Synapse configuration files
+    /// are in use.
+    #[clap(long = "synapse-config", global = true)]
+    synapse_configuration_files: Vec<Utf8PathBuf>,
+
+    /// Override the Synapse database URI.
+    /// syn2mas normally loads the Synapse database connection details from the
+    /// Synapse configuration. However, it may sometimes be necessary to
+    /// override the database URI and in that case this flag can be used.
+    ///
+    /// Should be a connection URI of the following general form:
+    /// ```text
+    /// postgresql://[user[:password]@][host][:port][/dbname][?param1=value1&...]
+    /// ```
+    /// To use a UNIX socket at a custom path, the host should be a path to a
+    /// socket, but in the URI string it must be URI-encoded by replacing
+    /// `/` with `%2F`.
+    ///
+    /// Finally, any missing values will be loaded from the libpq-compatible
+    /// environment variables `PGHOST`, `PGPORT`, `PGUSER`, `PGDATABASE`,
+    /// `PGPASSWORD`, etc. It is valid to specify the URL `postgresql:` and
+    /// configure all values through those environment variables.
+    #[clap(long = "synapse-database-uri", global = true)]
+    synapse_database_uri: Option<PgConnectOptions>,
+
+    /// Make missing auth providers in Synapse config warnings instead of
+    /// errors. If this flag is set, and we find `auth_provider` values in
+    /// the Synapse `user_external_ids` table, that are not configured in
+    /// the Synapse OIDC configuration, instead of erroring we will just
+    /// output warnings.
+    #[clap(long = "ignore-missing-auth-providers", global = true)]
+    ignore_missing_auth_providers: bool,
+}
+
+/// The available `syn2mas` subcommands.
+#[derive(Parser, Debug)]
+enum Subcommand {
+    /// Check the setup for potential problems before running a migration.
+    ///
+    /// It is OK for Synapse to be online during these checks.
+    Check,
+
+    /// Perform a migration. Synapse must be offline during this process.
+    Migrate {
+        /// Perform a dry-run migration, which is safe to run with Synapse
+        /// running, and will restore the MAS database to an empty state.
+        ///
+        /// This still *does* write to the MAS database, making it more
+        /// realistic compared to the final migration.
+        #[clap(long)]
+        dry_run: bool,
+    },
+}
+
+/// The number of parallel writing transactions active against the MAS database.
+const NUM_WRITER_CONNECTIONS: usize = 8;
+
+impl Options {
+    /// Run the selected `syn2mas` subcommand, returning the process exit
+    /// code. Both subcommands run the pre-migration checks; `migrate`
+    /// additionally performs the data migration itself.
+    #[tracing::instrument("cli.syn2mas.run", skip_all)]
+    pub async fn run(self, figment: &Figment) -> anyhow::Result<ExitCode> {
+        if self.synapse_configuration_files.is_empty() {
+            error!("Please specify the path to the Synapse configuration file(s).");
+            return Ok(ExitCode::FAILURE);
+        }
+
+        let synapse_config = synapse_config::Config::load(&self.synapse_configuration_files)
+            .map_err(anyhow::Error::from_boxed)
+            .context("Failed to load Synapse configuration")?;
+
+        // Establish a connection to Synapse's Postgres database
+        let syn_connection_options = if let Some(db_override) = self.synapse_database_uri {
+            db_override
+        } else {
+            synapse_config
+                .database
+                .to_sqlx_postgres()
+                .context("Synapse database configuration is invalid, cannot migrate.")?
+        };
+        let mut syn_conn = PgConnection::connect_with(&syn_connection_options)
+            .await
+            .context("could not connect to Synapse Postgres database")?;
+
+        let config =
+            DatabaseConfig::extract_or_default(figment).map_err(anyhow::Error::from_boxed)?;
+
+        // Single MAS connection used for locking and checks; slow-statement
+        // logging is disabled because migration statements are expected to be slow
+        let mut mas_connection = database_connection_from_config_with_options(
+            &config,
+            &DatabaseConnectOptions {
+                log_slow_statements: false,
+            },
+        )
+        .await?;
+
+        // Make sure the MAS schema is up to date before writing into it
+        mas_storage_pg::migrate(&mut mas_connection)
+            .await
+            .context("could not run migrations")?;
+
+        if matches!(&self.subcommand, Subcommand::Migrate { .. }) {
+            // First perform a config sync
+            // This is crucial to ensure we register upstream OAuth providers
+            // in the MAS database
+            let config = SyncConfig::extract(figment).map_err(anyhow::Error::from_boxed)?;
+            let clock = SystemClock::default();
+            let encrypter = config.secrets.encrypter().await?;
+
+            crate::sync::config_sync(
+                config.upstream_oauth2,
+                config.clients,
+                &mut mas_connection,
+                &encrypter,
+                &clock,
+                // Don't prune — we don't want to be unnecessarily destructive
+                false,
+                // Not a dry run — we do want to create the providers in the database
+                false,
+            )
+            .await
+            .context("could not sync the configuration with the database")?;
+        }
+
+        // Take an advisory lock so only one syn2mas instance runs at a time
+        let Either::Left(mut mas_connection) = LockedMasDatabase::try_new(mas_connection)
+            .await
+            .context("failed to issue query to lock database")?
+        else {
+            error!("Failed to acquire syn2mas lock on the database.");
+            error!("This likely means that another syn2mas instance is already running!");
+            return Ok(ExitCode::FAILURE);
+        };
+
+        // Check configuration
+        let (mut check_warnings, mut check_errors) = syn2mas::synapse_config_check(&synapse_config);
+        {
+            let (extra_warnings, extra_errors) =
+                syn2mas::synapse_config_check_against_mas_config(&synapse_config, figment).await?;
+            check_warnings.extend(extra_warnings);
+            check_errors.extend(extra_errors);
+        }
+
+        // Check databases
+        syn2mas::mas_pre_migration_checks(&mut mas_connection).await?;
+        {
+            let (extra_warnings, extra_errors) = syn2mas::synapse_database_check(
+                &mut syn_conn,
+                &synapse_config,
+                figment,
+                self.ignore_missing_auth_providers,
+            )
+            .await?;
+            check_warnings.extend(extra_warnings);
+            check_errors.extend(extra_errors);
+        }
+
+        // Display errors and warnings
+        if !check_errors.is_empty() {
+            eprintln!("\n\n===== Errors =====");
+            eprintln!("These issues prevent migrating from Synapse to MAS right now:\n");
+            for error in &check_errors {
+                eprintln!("• {error}\n");
+            }
+        }
+        if !check_warnings.is_empty() {
+            eprintln!("\n\n===== Warnings =====");
+            eprintln!(
+                "These potential issues should be considered before migrating from Synapse to MAS right now:\n"
+            );
+            for warning in &check_warnings {
+                eprintln!("• {warning}\n");
+            }
+        }
+
+        // Do not proceed if there are any errors
+        if !check_errors.is_empty() {
+            return Ok(ExitCode::from(EXIT_CODE_CHECK_ERRORS));
+        }
+
+        match self.subcommand {
+            Subcommand::Check => {
+                if !check_warnings.is_empty() {
+                    return Ok(ExitCode::from(EXIT_CODE_CHECK_WARNINGS));
+                }
+
+                println!("Check completed successfully with no errors or warnings.");
+
+                Ok(ExitCode::SUCCESS)
+            }
+
+            Subcommand::Migrate { dry_run } => {
+                // Map Synapse `auth_provider` IDs to the MAS upstream provider ULIDs,
+                // via the `synapse_idp_id` field of each configured provider
+                let provider_id_mappings: HashMap<String, Uuid> = {
+                    let mas_oauth2 = UpstreamOAuth2Config::extract_or_default(figment)
+                        .map_err(anyhow::Error::from_boxed)?;
+
+                    mas_oauth2
+                        .providers
+                        .iter()
+                        .filter_map(|provider| {
+                            let synapse_idp_id = provider.synapse_idp_id.clone()?;
+                            Some((synapse_idp_id, Uuid::from(provider.id)))
+                        })
+                        .collect()
+                };
+
+                // TODO how should we handle warnings at this stage?
+
+                let reader = SynapseReader::new(&mut syn_conn, dry_run).await?;
+                // Open NUM_WRITER_CONNECTIONS additional MAS connections for
+                // the parallel writer transactions
+                let writer_mas_connections =
+                    futures_util::future::try_join_all((0..NUM_WRITER_CONNECTIONS).map(|_| {
+                        database_connection_from_config_with_options(
+                            &config,
+                            &DatabaseConnectOptions {
+                                log_slow_statements: false,
+                            },
+                        )
+                    }))
+                    .instrument(tracing::info_span!("syn2mas.mas_writer_connections"))
+                    .await?;
+                let writer =
+                    MasWriter::new(mas_connection, writer_mas_connections, dry_run).await?;
+
+                let clock = SystemClock::default();
+                // TODO is this rng ok?
+                #[allow(clippy::disallowed_methods)]
+                let mut rng = thread_rng();
+
+                let progress = Progress::default();
+
+                // Periodically log progress in the background; aborted once
+                // the migration completes
+                let occasional_progress_logger_task =
+                    tokio::spawn(occasional_progress_logger(progress.clone()));
+
+                let mas_matrix =
+                    MatrixConfig::extract(figment).map_err(anyhow::Error::from_boxed)?;
+                syn2mas::migrate(
+                    reader,
+                    writer,
+                    mas_matrix.homeserver,
+                    &clock,
+                    &mut rng,
+                    provider_id_mappings,
+                    &progress,
+                    self.ignore_missing_auth_providers,
+                )
+                .await?;
+
+                occasional_progress_logger_task.abort();
+
+                Ok(ExitCode::SUCCESS)
+            }
+        }
+    }
+}
+
+/// Logs progress every 5 seconds, as a lightweight alternative to a progress
+/// bar. For most deployments, the migration will not take 5 seconds so this
+/// will not be relevant. In other cases, this will give the operator an idea of
+/// what's going on.
+async fn occasional_progress_logger(progress: Progress) {
+    loop {
+        tokio::time::sleep(Duration::from_secs(5)).await;
+        match &**progress.get_current_stage() {
+            ProgressStage::SettingUp => {
+                info!(name: "progress", "still setting up");
+            }
+            ProgressStage::MigratingData {
+                entity,
+                counter,
+                approx_count,
+            } => {
+                let migrated = counter.migrated();
+                let skipped = counter.skipped();
+                #[allow(clippy::cast_precision_loss)]
+                let percent = (f64::from(migrated + skipped) / *approx_count as f64) * 100.0;
+                info!(name: "progress", "migrating {entity}: {migrated} ({skipped} skipped) /~{approx_count} (~{percent:.1}%)");
+            }
+            ProgressStage::RebuildIndex { index_name } => {
+                info!(name: "progress", "still waiting for rebuild of index {index_name}");
+            }
+            ProgressStage::RebuildConstraint { constraint_name } => {
+                info!(name: "progress", "still waiting for rebuild of constraint {constraint_name}");
+            }
+        }
+    }
+}
diff --git a/matrix-authentication-service/crates/cli/src/commands/templates.rs b/matrix-authentication-service/crates/cli/src/commands/templates.rs
new file mode 100644
index 00000000..1199e1f9
--- /dev/null
+++ 
b/matrix-authentication-service/crates/cli/src/commands/templates.rs
@@ -0,0 +1,145 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2021-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use std::{fmt::Write, process::ExitCode};
+
+use anyhow::{Context as _, bail};
+use camino::Utf8PathBuf;
+use chrono::DateTime;
+use clap::Parser;
+use figment::Figment;
+use mas_config::{
+    AccountConfig, BrandingConfig, CaptchaConfig, ConfigurationSection, ConfigurationSectionExt,
+    ExperimentalConfig, MatrixConfig, PasswordsConfig, TemplatesConfig,
+};
+use mas_data_model::{Clock, SystemClock};
+use rand::SeedableRng;
+use tracing::info_span;
+
+use crate::util::{site_config_from_config, templates_from_config};
+
+/// Options for the `templates` subcommand.
+#[derive(Parser, Debug)]
+pub(super) struct Options {
+    #[clap(subcommand)]
+    subcommand: Subcommand,
+}
+
+#[derive(Parser, Debug)]
+enum Subcommand {
+    /// Check that the templates specified in the config are valid
+    Check {
+        /// If set, templates will be rendered to this directory.
+        /// The directory must either not exist or be empty.
+        #[arg(long = "out-dir")]
+        out_dir: Option<Utf8PathBuf>,
+
+        /// Attempt to remove 'unstable' template input data such as asset
+        /// hashes, in order to make renders more reproducible between
+        /// versions.
+        #[arg(long = "stabilise")]
+        stabilise: bool,
+    },
+}
+
+impl Options {
+    /// Run the `templates` subcommand: render every template against sample
+    /// data and optionally save the renders to disk.
+    pub async fn run(self, figment: &Figment) -> anyhow::Result<ExitCode> {
+        use Subcommand as SC;
+        match self.subcommand {
+            SC::Check { out_dir, stabilise } => {
+                let _span = info_span!("cli.templates.check").entered();
+
+                // Extract all the config sections the site config and
+                // template engine depend on
+                let template_config = TemplatesConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+                let branding_config = BrandingConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+                let matrix_config =
+                    MatrixConfig::extract(figment).map_err(anyhow::Error::from_boxed)?;
+                let experimental_config = ExperimentalConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+                let password_config = PasswordsConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+                let account_config = AccountConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+                let captcha_config = CaptchaConfig::extract_or_default(figment)
+                    .map_err(anyhow::Error::from_boxed)?;
+
+                // With --stabilise, use a fixed clock and a seeded RNG so the
+                // renders are reproducible between runs/versions
+                let now = if stabilise {
+                    DateTime::from_timestamp_secs(1_446_823_992).unwrap()
+                } else {
+                    SystemClock::default().now()
+                };
+                let rng = if stabilise {
+                    rand_chacha::ChaChaRng::from_seed([42; 32])
+                } else {
+                    // XXX: we should disallow SeedableRng::from_entropy
+                    rand_chacha::ChaChaRng::from_entropy()
+                };
+                let url_builder =
+                    mas_router::UrlBuilder::new("https://example.com/".parse()?, None, None);
+                let site_config = site_config_from_config(
+                    &branding_config,
+                    &matrix_config,
+                    &experimental_config,
+                    &password_config,
+                    &account_config,
+                    &captcha_config,
+                )?;
+                let templates = templates_from_config(
+                    &template_config,
+                    &site_config,
+                    &url_builder,
+                    // Use strict mode in template checks
+                    true,
+                    stabilise,
+                )
+                .await?;
+                let all_renders = templates.check_render(now, &rng)?;
+
+                if let Some(out_dir) = out_dir {
+                    // Save renders to disk.
+                    if out_dir.exists() {
+                        // Refuse to clobber an existing, non-empty directory
+                        let mut read_dir =
+                            tokio::fs::read_dir(&out_dir).await.with_context(|| {
+                                format!("could not read {out_dir} to check it's empty")
+                            })?;
+                        if read_dir.next_entry().await?.is_some() {
+                            bail!("Render directory {out_dir} is not empty, refusing to write.");
+                        }
+                    } else {
+                        tokio::fs::create_dir(&out_dir)
+                            .await
+                            .with_context(|| format!("could not create {out_dir}"))?;
+                    }
+
+                    for ((template, sample_identifier), template_render) in &all_renders {
+                        let (template_filename_base, template_ext) =
+                            template.rsplit_once('.').unwrap_or((template, "txt"));
+                        let template_filename_base = template_filename_base.replace('/', "_");
+
+                        // Make a string like `-index=0-browser-session=0-locale=fr`
+                        let sample_suffix = {
+                            let mut s = String::new();
+                            for (k, v) in &sample_identifier.components {
+                                write!(s, "-{k}={v}")?;
+                            }
+                            s
+                        };
+
+                        let render_path = out_dir.join(format!(
+                            "{template_filename_base}{sample_suffix}.{template_ext}"
+                        ));
+
+                        tokio::fs::write(&render_path, template_render.as_bytes())
+                            .await
+                            .with_context(|| format!("could not write render to {render_path}"))?;
+                    }
+                }
+
+                Ok(ExitCode::SUCCESS)
+            }
+        }
+    }
+}
diff --git a/matrix-authentication-service/crates/cli/src/commands/worker.rs b/matrix-authentication-service/crates/cli/src/commands/worker.rs
new file mode 100644
index 00000000..6dfcd27f
--- /dev/null
+++ b/matrix-authentication-service/crates/cli/src/commands/worker.rs
@@ -0,0 +1,93 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use std::{process::ExitCode, time::Duration};
+
+use clap::Parser;
+use figment::Figment;
+use mas_config::{AppConfig, ConfigurationSection};
+use mas_data_model::SystemClock;
+use mas_router::UrlBuilder;
+use mas_storage_pg::PgRepositoryFactory;
+use tracing::{info, info_span};
+
+use crate::{
+    lifecycle::LifecycleManager,
+    util::{
+        database_pool_from_config, homeserver_connection_from_config, mailer_from_config,
+        site_config_from_config, templates_from_config, test_mailer_in_background,
+    },
+};
+
+/// Options for the `worker` subcommand (takes no arguments).
+#[derive(Parser, Debug, Default)]
+pub(super) struct Options {}
+
+impl Options {
+    /// Run a standalone task worker until it is shut down, returning the
+    /// process exit code.
+    pub async fn run(self, figment: &Figment) -> anyhow::Result<ExitCode> {
+        let shutdown = LifecycleManager::new()?;
+        let span = info_span!("cli.worker.init").entered();
+        let config = AppConfig::extract(figment).map_err(anyhow::Error::from_boxed)?;
+
+        // Connect to the database
+        info!("Connecting to the database");
+        let pool = database_pool_from_config(&config.database).await?;
+
+        let url_builder = UrlBuilder::new(
+            config.http.public_base.clone(),
+            config.http.issuer.clone(),
+            None,
+        );
+
+        // Load the site configuration
+        let site_config = site_config_from_config(
+            &config.branding,
+            &config.matrix,
+            &config.experimental,
+            &config.passwords,
+            &config.account,
+            &config.captcha,
+        )?;
+
+        // Load and compile the templates
+        let templates = templates_from_config(
+            &config.templates,
+            &site_config,
+            &url_builder,
+            // Don't use strict mode on task workers for now
+            false,
+            // Don't stabilise in production
+            false,
+        )
+        .await?;
+
+        let mailer = mailer_from_config(&config.email, &templates)?;
+        test_mailer_in_background(&mailer, Duration::from_secs(30));
+
+        let http_client = mas_http::reqwest_client();
+        let conn = homeserver_connection_from_config(&config.matrix, http_client).await?;
+
+        // Drop the config to properly zeroize the secret keys it holds
+        drop(config);
+
+        info!("Starting task scheduler");
+        mas_tasks::init_and_run(
+            PgRepositoryFactory::new(pool.clone()),
+            SystemClock::default(),
+            &mailer,
+            conn,
+            url_builder,
+            &site_config,
+            shutdown.soft_shutdown_token(),
+            shutdown.task_tracker(),
+        )
+        .await?;
+
+        span.exit();
+
+        let exit_code = shutdown.run().await;
+
+        Ok(exit_code)
+    }
+}
diff --git a/matrix-authentication-service/crates/cli/src/lifecycle.rs b/matrix-authentication-service/crates/cli/src/lifecycle.rs
new file mode 100644
index 00000000..e4416293
--- /dev/null
+++ b/matrix-authentication-service/crates/cli/src/lifecycle.rs
@@ -0,0 +1,239 @@
+// Copyright 2024, 2025 New Vector Ltd.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use std::{process::ExitCode, time::Duration};
+
+use futures_util::future::{BoxFuture, Either};
+use mas_handlers::ActivityTracker;
+use mas_templates::Templates;
+use tokio::signal::unix::{Signal, SignalKind};
+use tokio_util::{sync::CancellationToken, task::TaskTracker};
+
+/// A helper to manage the lifecycle of the service, including handling graceful
+/// shutdowns and configuration reloads.
+///
+/// It will listen for SIGTERM and SIGINT signals, and will trigger a soft
+/// shutdown on the first signal, and a hard shutdown on the second signal or
+/// after a timeout.
+///
+/// Users of this manager should use the `soft_shutdown_token` to react to a
+/// soft shutdown, which should gracefully finish requests and close
+/// connections, and the `hard_shutdown_token` to react to a hard shutdown,
+/// which should drop all connections and finish all requests.
+///
+/// They should also use the `task_tracker` to make it track things running, so
+/// that it knows when the soft shutdown is over and has completed.
+///
+/// It also integrates with [`sd_notify`] to notify the service manager of the
+/// state of the service.
+pub struct LifecycleManager { + hard_shutdown_token: CancellationToken, + soft_shutdown_token: CancellationToken, + task_tracker: TaskTracker, + sigterm: Signal, + sigint: Signal, + sighup: Signal, + timeout: Duration, + reload_handlers: Vec BoxFuture<'static, ()>>>, +} + +/// Represents a thing that can be reloaded with a SIGHUP +pub trait Reloadable: Clone + Send { + fn reload(&self) -> impl Future + Send; +} + +impl Reloadable for ActivityTracker { + async fn reload(&self) { + self.flush().await; + } +} + +impl Reloadable for Templates { + async fn reload(&self) { + if let Err(err) = self.reload().await { + tracing::error!( + error = &err as &dyn std::error::Error, + "Failed to reload templates" + ); + } + } +} + +/// A wrapper around [`sd_notify::notify`] that logs any errors +fn notify(states: &[sd_notify::NotifyState]) { + if let Err(e) = sd_notify::notify(false, states) { + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to notify service manager" + ); + } +} + +impl LifecycleManager { + /// Create a new shutdown manager, installing the signal handlers + /// + /// # Errors + /// + /// Returns an error if the signal handler could not be installed + pub fn new() -> Result { + let hard_shutdown_token = CancellationToken::new(); + let soft_shutdown_token = hard_shutdown_token.child_token(); + let sigterm = tokio::signal::unix::signal(SignalKind::terminate())?; + let sigint = tokio::signal::unix::signal(SignalKind::interrupt())?; + let sighup = tokio::signal::unix::signal(SignalKind::hangup())?; + let timeout = Duration::from_secs(60); + let task_tracker = TaskTracker::new(); + + notify(&[sd_notify::NotifyState::MainPid(std::process::id())]); + + Ok(Self { + hard_shutdown_token, + soft_shutdown_token, + task_tracker, + sigterm, + sigint, + sighup, + timeout, + reload_handlers: Vec::new(), + }) + } + + /// Add a handler to be called when the server gets a SIGHUP + pub fn register_reloadable(&mut self, reloadable: &(impl Reloadable + 'static)) { 
+ let reloadable = reloadable.clone(); + self.reload_handlers.push(Box::new(move || { + let reloadable = reloadable.clone(); + Box::pin(async move { reloadable.reload().await }) + })); + } + + /// Get a reference to the task tracker + #[must_use] + pub fn task_tracker(&self) -> &TaskTracker { + &self.task_tracker + } + + /// Get a cancellation token that can be used to react to a hard shutdown + #[must_use] + pub fn hard_shutdown_token(&self) -> CancellationToken { + self.hard_shutdown_token.clone() + } + + /// Get a cancellation token that can be used to react to a soft shutdown + #[must_use] + pub fn soft_shutdown_token(&self) -> CancellationToken { + self.soft_shutdown_token.clone() + } + + /// Run until we finish completely shutting down. + pub async fn run(mut self) -> ExitCode { + notify(&[sd_notify::NotifyState::Ready]); + + // This will be `Some` if we have the watchdog enabled, and `None` if not + let mut watchdog_interval = { + let mut watchdog_usec = 0; + if sd_notify::watchdog_enabled(false, &mut watchdog_usec) { + Some(tokio::time::interval(Duration::from_micros( + watchdog_usec / 2, + ))) + } else { + None + } + }; + + // Wait for a first shutdown signal and trigger the soft shutdown + let likely_crashed = loop { + // This makes a Future that will either yield the watchdog tick if enabled, or a + // pending Future if not + let watchdog_tick = if let Some(watchdog_interval) = &mut watchdog_interval { + Either::Left(watchdog_interval.tick()) + } else { + Either::Right(futures_util::future::pending()) + }; + + tokio::select! { + () = self.soft_shutdown_token.cancelled() => { + tracing::warn!("Another task triggered a shutdown, it likely crashed! 
Shutting down"); + break true; + }, + + _ = self.sigterm.recv() => { + tracing::info!("Shutdown signal received (SIGTERM), shutting down"); + break false; + }, + + _ = self.sigint.recv() => { + tracing::info!("Shutdown signal received (SIGINT), shutting down"); + break false; + }, + + _ = watchdog_tick => { + notify(&[ + sd_notify::NotifyState::Watchdog, + ]); + }, + + _ = self.sighup.recv() => { + tracing::info!("Reload signal received (SIGHUP), reloading"); + + notify(&[ + sd_notify::NotifyState::Reloading, + sd_notify::NotifyState::monotonic_usec_now() + .expect("Failed to read monotonic clock") + ]); + + // XXX: if one handler takes a long time, it will block the + // rest of the shutdown process, which is not ideal. We + // should probably have a timeout here + futures_util::future::join_all( + self.reload_handlers + .iter() + .map(|handler| handler()) + ).await; + + notify(&[sd_notify::NotifyState::Ready]); + + tracing::info!("Reloading done"); + }, + } + }; + + notify(&[sd_notify::NotifyState::Stopping]); + + self.soft_shutdown_token.cancel(); + self.task_tracker.close(); + + // Start the timeout + let timeout = tokio::time::sleep(self.timeout); + tokio::select! 
{ + _ = self.sigterm.recv() => { + tracing::warn!("Second shutdown signal received (SIGTERM), abort"); + }, + _ = self.sigint.recv() => { + tracing::warn!("Second shutdown signal received (SIGINT), abort"); + }, + () = timeout => { + tracing::warn!("Shutdown timeout reached, abort"); + }, + () = self.task_tracker.wait() => { + // This is the "happy path", we have gracefully shutdown + }, + } + + self.hard_shutdown_token().cancel(); + + // TODO: we may want to have a time out on the task tracker, in case we have + // really stuck tasks on it + self.task_tracker().wait().await; + + tracing::info!("All tasks are done, exitting"); + + if likely_crashed { + ExitCode::FAILURE + } else { + ExitCode::SUCCESS + } + } +} diff --git a/matrix-authentication-service/crates/cli/src/main.rs b/matrix-authentication-service/crates/cli/src/main.rs new file mode 100644 index 00000000..9c1121cc --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/main.rs @@ -0,0 +1,181 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+
+#![allow(clippy::module_name_repetitions)]
+
+use std::{io::IsTerminal, process::ExitCode, sync::Arc};
+
+use anyhow::Context;
+use clap::Parser;
+use mas_config::{ConfigurationSectionExt, TelemetryConfig};
+use sentry_tracing::EventFilter;
+use tracing_subscriber::{
+    EnvFilter, Layer, Registry,
+    filter::{LevelFilter, filter_fn},
+    layer::SubscriberExt,
+    util::SubscriberInitExt,
+};
+
+mod app_state;
+mod commands;
+mod lifecycle;
+mod server;
+mod sync;
+mod telemetry;
+mod util;
+
+/// The application version, as reported by `git describe` at build time
+static VERSION: &str = env!("VERGEN_GIT_DESCRIBE");
+
+/// A Sentry transport factory which reuses our shared reqwest client instead
+/// of letting Sentry build its own.
+#[derive(Debug)]
+struct SentryTransportFactory {
+    client: reqwest::Client,
+}
+
+impl SentryTransportFactory {
+    fn new() -> Self {
+        Self {
+            client: mas_http::reqwest_client(),
+        }
+    }
+}
+
+impl sentry::TransportFactory for SentryTransportFactory {
+    fn create_transport(&self, options: &sentry::ClientOptions) -> Arc<dyn sentry::Transport> {
+        let transport =
+            sentry::transports::ReqwestHttpTransport::with_client(options, self.client.clone());
+
+        Arc::new(transport)
+    }
+}
+
+/// Process entry point: build the Tokio runtime and hand over to
+/// [`async_main`].
+fn main() -> anyhow::Result<ExitCode> {
+    let mut builder = tokio::runtime::Builder::new_multi_thread();
+    builder.enable_all();
+
+    #[cfg(tokio_unstable)]
+    builder
+        .enable_metrics_poll_time_histogram()
+        .metrics_poll_time_histogram_configuration(tokio::runtime::HistogramConfiguration::log(
+            tokio::runtime::LogHistogram::default(),
+        ));
+
+    let runtime = builder.build()?;
+
+    runtime.block_on(async_main())
+}
+
+/// Run [`try_main`], then shut down the telemetry exporters whether it
+/// succeeded or not.
+async fn async_main() -> anyhow::Result<ExitCode> {
+    // We're splitting the "fallible" part of main in another function to have a
+    // chance to shutdown the telemetry exporters regardless of if there was an
+    // error or not
+    let res = try_main().await;
+    if let Err(err) = self::telemetry::shutdown() {
+        eprintln!("Failed to shutdown telemetry exporters: {err}");
+    }
+    res
+}
+
+/// Fallible part of the entry point: set up logging, Sentry, OpenTelemetry
+/// and TLS, then dispatch to the selected CLI subcommand.
+async fn try_main() -> anyhow::Result<ExitCode> {
+    // Load environment variables from .env files
+    // We keep the path to log it afterwards
+    let dotenv_path: Result<Option<std::path::PathBuf>, _> = dotenvy::dotenv()
+        .map(Some)
+        // Display the error if it is something other than the .env file not existing
+        .or_else(|e| if e.not_found() { Ok(None) } else { Err(e) });
+
+    // Setup logging
+    // This writes logs to stderr
+    let output = std::io::stderr();
+    let with_ansi = output.is_terminal();
+    let (log_writer, _guard) = tracing_appender::non_blocking(output);
+    let fmt_layer = tracing_subscriber::fmt::layer()
+        .with_writer(log_writer)
+        .event_format(mas_context::EventFormatter)
+        .with_ansi(with_ansi);
+    let filter_layer = EnvFilter::try_from_default_env()
+        .or_else(|_| EnvFilter::try_new("info"))
+        .context("could not setup logging filter")?;
+
+    // Suppress the following warning from the Jaeger propagator:
+    // Invalid jaeger header format header_value=""
+    let suppress_layer = filter_fn(|metadata| metadata.name() != "JaegerPropagator.InvalidHeader");
+
+    // Setup the rustls crypto provider
+    rustls::crypto::aws_lc_rs::default_provider()
+        .install_default()
+        .map_err(|_| anyhow::anyhow!("could not install the AWS LC crypto provider"))?;
+
+    // Parse the CLI arguments
+    let opts = self::commands::Options::parse();
+
+    // Load the base configuration files
+    let figment = opts.figment();
+
+    let telemetry_config = TelemetryConfig::extract_or_default(&figment)
+        .map_err(anyhow::Error::from_boxed)
+        .context("Failed to load telemetry config")?;
+
+    // Setup Sentry
+    let sentry = sentry::init((
+        telemetry_config.sentry.dsn.as_deref(),
+        sentry::ClientOptions {
+            transport: Some(Arc::new(SentryTransportFactory::new())),
+            environment: telemetry_config.sentry.environment.clone().map(Into::into),
+            release: Some(VERSION.into()),
+            sample_rate: telemetry_config.sentry.sample_rate.unwrap_or(1.0),
+            traces_sample_rate: telemetry_config.sentry.traces_sample_rate.unwrap_or(0.0),
+            ..Default::default()
+        },
+    ));
+
+    let sentry_layer = sentry.is_enabled().then(|| {
+        sentry_tracing::layer().event_filter(|md| {
+            // By default, Sentry records all events as breadcrumbs, except errors.
+            //
+            // Because we're emitting error events for 5xx responses, we need to exclude
+            // them and also record them as breadcrumbs.
+            if md.name() == "http.server.response" {
+                EventFilter::Breadcrumb
+            } else {
+                sentry_tracing::default_event_filter(md)
+            }
+        })
+    });
+
+    // Setup OpenTelemetry tracing and metrics
+    self::telemetry::setup(&telemetry_config).context("failed to setup OpenTelemetry")?;
+
+    let tracer = self::telemetry::TRACER
+        .get()
+        .context("TRACER was not set")?;
+
+    let telemetry_layer = tracing_opentelemetry::layer()
+        .with_tracer(tracer.clone())
+        .with_tracked_inactivity(false)
+        .with_filter(LevelFilter::INFO);
+
+    let subscriber = Registry::default()
+        .with(suppress_layer)
+        .with(sentry_layer)
+        .with(telemetry_layer)
+        .with(filter_layer)
+        .with(fmt_layer);
+    subscriber
+        .try_init()
+        .context("could not initialize logging")?;
+
+    // Log about the .env loading
+    match dotenv_path {
+        Ok(Some(path)) => tracing::info!(?path, "Loaded environment variables from .env file"),
+        Ok(None) => {}
+        Err(e) => tracing::warn!(?e, "Failed to load .env file"),
+    }
+
+    // And run the command
+    tracing::trace!(?opts, "Running command");
+    opts.run(&figment).await
+}
diff --git a/matrix-authentication-service/crates/cli/src/server.rs b/matrix-authentication-service/crates/cli/src/server.rs
new file mode 100644
index 00000000..25641c85
--- /dev/null
+++ b/matrix-authentication-service/crates/cli/src/server.rs
@@ -0,0 +1,429 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+ +use std::{ + net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, TcpListener, ToSocketAddrs}, + os::unix::net::UnixListener, + time::Duration, +}; + +use anyhow::Context; +use axum::{ + Extension, Router, + extract::{FromRef, MatchedPath}, +}; +use headers::{CacheControl, HeaderMapExt as _, UserAgent}; +use hyper::{Method, Request, Response, StatusCode, Version, header::USER_AGENT}; +use listenfd::ListenFd; +use mas_config::{HttpBindConfig, HttpResource, HttpTlsConfig, UnixOrTcp}; +use mas_context::LogContext; +use mas_listener::{ConnectionInfo, unix_or_tcp::UnixOrTcpListener}; +use mas_router::Route; +use mas_templates::Templates; +use mas_tower::{ + DurationRecorderLayer, InFlightCounterLayer, KV, TraceLayer, make_span_fn, + metrics_attributes_fn, +}; +use opentelemetry::{Key, KeyValue}; +use opentelemetry_http::HeaderExtractor; +use opentelemetry_semantic_conventions::trace::{ + HTTP_REQUEST_METHOD, HTTP_RESPONSE_STATUS_CODE, HTTP_ROUTE, NETWORK_PROTOCOL_NAME, + NETWORK_PROTOCOL_VERSION, URL_PATH, URL_QUERY, URL_SCHEME, USER_AGENT_ORIGINAL, +}; +use rustls::ServerConfig; +use sentry_tower::{NewSentryLayer, SentryHttpLayer}; +use tower::Layer; +use tower_http::services::{ServeDir, fs::ServeFileSystemResponseBody}; +use tracing::Span; +use tracing_opentelemetry::OpenTelemetrySpanExt; + +use crate::app_state::AppState; + +const MAS_LISTENER_NAME: Key = Key::from_static_str("mas.listener.name"); + +#[inline] +fn otel_http_method(request: &Request) -> &'static str { + match request.method() { + &Method::OPTIONS => "OPTIONS", + &Method::GET => "GET", + &Method::POST => "POST", + &Method::PUT => "PUT", + &Method::DELETE => "DELETE", + &Method::HEAD => "HEAD", + &Method::TRACE => "TRACE", + &Method::CONNECT => "CONNECT", + &Method::PATCH => "PATCH", + _other => "_OTHER", + } +} + +#[inline] +fn otel_net_protocol_version(request: &Request) -> &'static str { + match request.version() { + Version::HTTP_09 => "0.9", + Version::HTTP_10 => "1.0", + Version::HTTP_11 => "1.1", + 
Version::HTTP_2 => "2.0", + Version::HTTP_3 => "3.0", + _other => "_OTHER", + } +} + +fn otel_http_route(request: &Request) -> Option<&str> { + request + .extensions() + .get::() + .map(MatchedPath::as_str) +} + +fn otel_url_scheme(request: &Request) -> &'static str { + // XXX: maybe we should panic if the connection info was not injected in the + // request extensions + request + .extensions() + .get::() + .map_or("http", |conn_info| { + if conn_info.get_tls_ref().is_some() { + "https" + } else { + "http" + } + }) +} + +fn make_http_span(req: &Request) -> Span { + let method = otel_http_method(req); + let route = otel_http_route(req); + + let span_name = if let Some(route) = route.as_ref() { + format!("{method} {route}") + } else { + method.to_owned() + }; + + let span = tracing::info_span!( + "http.server.request", + "otel.kind" = "server", + "otel.name" = span_name, + "otel.status_code" = tracing::field::Empty, + { NETWORK_PROTOCOL_NAME } = "http", + { NETWORK_PROTOCOL_VERSION } = otel_net_protocol_version(req), + { HTTP_REQUEST_METHOD } = method, + { HTTP_ROUTE } = tracing::field::Empty, + { HTTP_RESPONSE_STATUS_CODE } = tracing::field::Empty, + { URL_PATH } = req.uri().path(), + { URL_QUERY } = tracing::field::Empty, + { URL_SCHEME } = otel_url_scheme(req), + { USER_AGENT_ORIGINAL } = tracing::field::Empty, + ); + + if let Some(route) = route.as_ref() { + span.record(HTTP_ROUTE, route); + } + + if let Some(query) = req.uri().query() { + span.record(URL_QUERY, query); + } + + if let Some(user_agent) = req + .headers() + .get(USER_AGENT) + .and_then(|ua| ua.to_str().ok()) + { + span.record(USER_AGENT_ORIGINAL, user_agent); + } + + // In case the span is disabled by any of tracing layers, e.g. if `RUST_LOG` + // is set to `warn`, `set_parent` will fail. So we only try to set the + // parent context if the span is not disabled. 
+ if !span.is_disabled() { + // Extract the parent span context from the request headers + let parent_context = opentelemetry::global::get_text_map_propagator(|propagator| { + let extractor = HeaderExtractor(req.headers()); + let context = opentelemetry::Context::new(); + propagator.extract_with_context(&context, &extractor) + }); + + if let Err(err) = span.set_parent(parent_context) { + tracing::error!( + error = &err as &dyn std::error::Error, + "Failed to set parent context on span" + ); + } + } + + span +} + +fn on_http_request_labels(request: &Request) -> Vec { + vec![ + KeyValue::new(NETWORK_PROTOCOL_NAME, "http"), + KeyValue::new(NETWORK_PROTOCOL_VERSION, otel_net_protocol_version(request)), + KeyValue::new(HTTP_REQUEST_METHOD, otel_http_method(request)), + KeyValue::new( + HTTP_ROUTE, + otel_http_route(request).unwrap_or("FALLBACK").to_owned(), + ), + KeyValue::new(URL_SCHEME, otel_url_scheme(request)), + ] +} + +fn on_http_response_labels(res: &Response) -> Vec { + vec![KeyValue::new( + HTTP_RESPONSE_STATUS_CODE, + i64::from(res.status().as_u16()), + )] +} + +async fn log_response_middleware( + request: axum::extract::Request, + next: axum::middleware::Next, +) -> axum::response::Response { + let user_agent: Option = request.headers().typed_get(); + let user_agent = user_agent.as_ref().map_or("-", |u| u.as_str()); + let method = otel_http_method(&request); + let path = request.uri().path().to_owned(); + let version = otel_net_protocol_version(&request); + + let response = next.run(request).await; + + let Some(stats) = LogContext::maybe_with(LogContext::stats) else { + tracing::error!("Missing log context for request, this is a bug!"); + return response; + }; + + let status_code = response.status(); + match status_code.as_u16() { + 100..=399 => tracing::info!( + name: "http.server.response", + "\"{method} {path} HTTP/{version}\" {status_code} {user_agent:?} [{stats}]", + ), + 400..=499 => tracing::warn!( + name: "http.server.response", + "\"{method} {path} 
HTTP/{version}\" {status_code} {user_agent:?} [{stats}]", + ), + 500..=599 => tracing::error!( + name: "http.server.response", + "\"{method} {path} HTTP/{version}\" {status_code} {user_agent:?} [{stats}]", + ), + _ => { /* This shouldn't happen */ } + } + + response +} + +pub fn build_router( + state: AppState, + resources: &[HttpResource], + prefix: Option<&str>, + name: Option<&str>, +) -> Router<()> { + let templates = Templates::from_ref(&state); + let mut router = Router::new(); + + for resource in resources { + router = match resource { + mas_config::HttpResource::Health => { + router.merge(mas_handlers::healthcheck_router::()) + } + mas_config::HttpResource::Prometheus => { + router.route_service("/metrics", crate::telemetry::prometheus_service()) + } + mas_config::HttpResource::Discovery => { + router.merge(mas_handlers::discovery_router::()) + } + mas_config::HttpResource::Human => { + router.merge(mas_handlers::human_router::(templates.clone())) + } + mas_config::HttpResource::GraphQL { + playground, + undocumented_oauth2_access, + } => router.merge(mas_handlers::graphql_router::( + *playground, + *undocumented_oauth2_access, + )), + mas_config::HttpResource::Assets { path } => { + let static_service = ServeDir::new(path) + .append_index_html_on_directories(false) + // The vite build pre-compresses assets with brotli and gzip + .precompressed_br() + .precompressed_gzip(); + + let add_cache_headers = axum::middleware::map_response( + async |mut res: Response| { + let cache_control = if res.status() == StatusCode::NOT_FOUND { + // Cache 404s for 5 minutes + CacheControl::new() + .with_public() + .with_max_age(Duration::from_secs(5 * 60)) + } else { + // Cache assets for 1 year + CacheControl::new() + .with_public() + .with_max_age(Duration::from_secs(365 * 24 * 60 * 60)) + .with_immutable() + }; + res.headers_mut().typed_insert(cache_control); + res + }, + ); + + router.nest_service( + mas_router::StaticAsset::route(), + 
add_cache_headers.layer(static_service), + ) + } + mas_config::HttpResource::OAuth => router.merge(mas_handlers::api_router::()), + mas_config::HttpResource::Compat => { + router.merge(mas_handlers::compat_router::(templates.clone())) + } + mas_config::HttpResource::AdminApi => { + let (_, api_router) = mas_handlers::admin_api_router::(); + router.merge(api_router) + } + // TODO: do a better handler here + mas_config::HttpResource::ConnectionInfo => router.route( + "/connection-info", + axum::routing::get(async |connection: Extension| { + format!("{connection:?}") + }), + ), + } + } + + // We normalize the prefix: + // - if it's None, it becomes '/' + // - if it's Some(..), any trailing '/' is first trimmed, then a '/' is added + let prefix = format!("{}/", prefix.unwrap_or_default().trim_end_matches('/')); + // Then we only nest the router if the prefix is not empty and not the root + // If we blindly nest the router if the prefix is Some("/"), axum will panic as + // we're not supposed to nest the router at the root + if !prefix.is_empty() && prefix != "/" { + router = Router::new().nest(&prefix, router); + } + + router = router.fallback(mas_handlers::fallback); + + router + .layer(axum::middleware::from_fn(log_response_middleware)) + .layer( + InFlightCounterLayer::new("http.server.active_requests").on_request(( + name.map(|name| KeyValue::new(MAS_LISTENER_NAME, name.to_owned())), + metrics_attributes_fn(on_http_request_labels), + )), + ) + .layer( + DurationRecorderLayer::new("http.server.duration") + .on_request(( + name.map(|name| KeyValue::new(MAS_LISTENER_NAME, name.to_owned())), + metrics_attributes_fn(on_http_request_labels), + )) + .on_response_fn(on_http_response_labels), + ) + .layer( + TraceLayer::new(( + make_span_fn(make_http_span), + name.map(|name| KV("mas.listener.name", name.to_owned())), + )) + .on_response_fn(|span: &Span, response: &Response<_>| { + let status_code = response.status().as_u16(); + span.record("http.response.status_code", 
status_code); + span.record("otel.status_code", "OK"); + }), + ) + .layer(mas_context::LogContextLayer::new(|req| { + otel_http_method(req).into() + })) + // Careful about the order here: the `NewSentryLayer` must be around the + // `SentryHttpLayer`. axum makes new layers wrap the existing ones, + // which is the other way around compared to `tower::ServiceBuilder`. + // So even if the Sentry docs has an example that does + // 'NewSentryHttpLayer then SentryHttpLayer', we must do the opposite. + .layer(SentryHttpLayer::new().enable_transaction()) + .layer(NewSentryLayer::new_from_top()) + .with_state(state) +} + +pub fn build_tls_server_config(config: &HttpTlsConfig) -> Result { + let (key, chain) = config.load()?; + + let mut config = rustls::ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(chain, key) + .context("failed to build TLS server config")?; + config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; + + Ok(config) +} + +pub fn build_listeners( + fd_manager: &mut ListenFd, + configs: &[HttpBindConfig], +) -> Result, anyhow::Error> { + let mut listeners = Vec::with_capacity(configs.len()); + + for bind in configs { + let listener = match bind { + HttpBindConfig::Listen { host, port } => { + let addrs = match host.as_deref() { + Some(host) => (host, *port) + .to_socket_addrs() + .context("could not parse listener host")? + .collect(), + + None => vec![ + SocketAddr::new(IpAddr::V6(Ipv6Addr::UNSPECIFIED), *port), + SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), *port), + ], + }; + + let listener = TcpListener::bind(&addrs[..]).context("could not bind address")?; + listener.set_nonblocking(true)?; + listener.try_into()? + } + + HttpBindConfig::Address { address } => { + let addr: SocketAddr = address + .parse() + .context("could not parse listener address")?; + let listener = TcpListener::bind(addr).context("could not bind address")?; + listener.set_nonblocking(true)?; + listener.try_into()? 
+ } + + HttpBindConfig::Unix { socket } => { + let listener = UnixListener::bind(socket).context("could not bind socket")?; + listener.try_into()? + } + + HttpBindConfig::FileDescriptor { + fd, + kind: UnixOrTcp::Tcp, + } => { + let listener = fd_manager + .take_tcp_listener(*fd)? + .context("no listener found on file descriptor")?; + listener.set_nonblocking(true)?; + listener.try_into()? + } + + HttpBindConfig::FileDescriptor { + fd, + kind: UnixOrTcp::Unix, + } => { + let listener = fd_manager + .take_unix_listener(*fd)? + .context("no unix socket found on file descriptor")?; + listener.set_nonblocking(true)?; + listener.try_into()? + } + }; + + listeners.push(listener); + } + + Ok(listeners) +} diff --git a/matrix-authentication-service/crates/cli/src/sync.rs b/matrix-authentication-service/crates/cli/src/sync.rs new file mode 100644 index 00000000..c4aeb9a9 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/sync.rs @@ -0,0 +1,430 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Utilities to synchronize the configuration file with the database. 
+ +use std::collections::{BTreeMap, BTreeSet}; + +use mas_config::{ClientsConfig, UpstreamOAuth2Config}; +use mas_data_model::Clock; +use mas_keystore::Encrypter; +use mas_storage::{ + Pagination, RepositoryAccess, + upstream_oauth2::{UpstreamOAuthProviderFilter, UpstreamOAuthProviderParams}, +}; +use mas_storage_pg::PgRepository; +use sqlx::{Connection, PgConnection, postgres::PgAdvisoryLock}; +use tracing::{error, info, info_span, warn}; + +fn map_import_action( + config: mas_config::UpstreamOAuth2ImportAction, +) -> mas_data_model::UpstreamOAuthProviderImportAction { + match config { + mas_config::UpstreamOAuth2ImportAction::Ignore => { + mas_data_model::UpstreamOAuthProviderImportAction::Ignore + } + mas_config::UpstreamOAuth2ImportAction::Suggest => { + mas_data_model::UpstreamOAuthProviderImportAction::Suggest + } + mas_config::UpstreamOAuth2ImportAction::Force => { + mas_data_model::UpstreamOAuthProviderImportAction::Force + } + mas_config::UpstreamOAuth2ImportAction::Require => { + mas_data_model::UpstreamOAuthProviderImportAction::Require + } + } +} + +fn map_import_on_conflict( + config: mas_config::UpstreamOAuth2OnConflict, +) -> mas_data_model::UpstreamOAuthProviderOnConflict { + match config { + mas_config::UpstreamOAuth2OnConflict::Add => { + mas_data_model::UpstreamOAuthProviderOnConflict::Add + } + mas_config::UpstreamOAuth2OnConflict::Replace => { + mas_data_model::UpstreamOAuthProviderOnConflict::Replace + } + mas_config::UpstreamOAuth2OnConflict::Set => { + mas_data_model::UpstreamOAuthProviderOnConflict::Set + } + mas_config::UpstreamOAuth2OnConflict::Fail => { + mas_data_model::UpstreamOAuthProviderOnConflict::Fail + } + } +} + +fn map_claims_imports( + config: &mas_config::UpstreamOAuth2ClaimsImports, +) -> mas_data_model::UpstreamOAuthProviderClaimsImports { + mas_data_model::UpstreamOAuthProviderClaimsImports { + subject: mas_data_model::UpstreamOAuthProviderSubjectPreference { + template: config.subject.template.clone(), + }, + 
skip_confirmation: config.skip_confirmation, + localpart: mas_data_model::UpstreamOAuthProviderLocalpartPreference { + action: map_import_action(config.localpart.action), + template: config.localpart.template.clone(), + on_conflict: map_import_on_conflict(config.localpart.on_conflict), + }, + displayname: mas_data_model::UpstreamOAuthProviderImportPreference { + action: map_import_action(config.displayname.action), + template: config.displayname.template.clone(), + }, + email: mas_data_model::UpstreamOAuthProviderImportPreference { + action: map_import_action(config.email.action), + template: config.email.template.clone(), + }, + account_name: mas_data_model::UpstreamOAuthProviderSubjectPreference { + template: config.account_name.template.clone(), + }, + } +} + +#[tracing::instrument(name = "config.sync", skip_all)] +pub async fn config_sync( + upstream_oauth2_config: UpstreamOAuth2Config, + clients_config: ClientsConfig, + connection: &mut PgConnection, + encrypter: &Encrypter, + clock: &dyn Clock, + prune: bool, + dry_run: bool, +) -> anyhow::Result<()> { + // Start a transaction + let txn = connection.begin().await?; + + // Grab a lock within the transaction + tracing::info!("Acquiring configuration lock"); + let lock = PgAdvisoryLock::new("MAS config sync"); + let lock = lock.acquire(txn).await?; + + // Create a repository from the connection with the lock + let mut repo = PgRepository::from_conn(lock); + + tracing::info!( + prune, + dry_run, + "Syncing providers and clients defined in config to database" + ); + + { + let _span = info_span!("cli.config.sync.providers").entered(); + let config_ids = upstream_oauth2_config + .providers + .iter() + .filter(|p| p.enabled) + .map(|p| p.id) + .collect::>(); + + // Let's assume we have less than 1000 providers + let page = repo + .upstream_oauth_provider() + .list( + UpstreamOAuthProviderFilter::default(), + Pagination::first(1000), + ) + .await?; + + // A warning is probably enough + if page.has_next_page { + warn!( 
+ "More than 1000 providers in the database, only the first 1000 will be considered" + ); + } + + let mut existing_enabled_ids = BTreeSet::new(); + let mut existing_disabled = BTreeMap::new(); + // Process the existing providers + for edge in page.edges { + let provider = edge.node; + if provider.enabled() { + if config_ids.contains(&provider.id) { + existing_enabled_ids.insert(provider.id); + } else { + // Provider is enabled in the database but not in the config + info!(%provider.id, "Disabling provider"); + + let provider = if dry_run { + provider + } else { + repo.upstream_oauth_provider() + .disable(clock, provider) + .await? + }; + + existing_disabled.insert(provider.id, provider); + } + } else { + existing_disabled.insert(provider.id, provider); + } + } + + if prune { + for provider_id in existing_disabled.keys().copied() { + info!(provider.id = %provider_id, "Deleting provider"); + + if dry_run { + continue; + } + + repo.upstream_oauth_provider() + .delete_by_id(provider_id) + .await?; + } + } else { + let len = existing_disabled.len(); + match len { + 0 => {} + 1 => warn!( + "A provider is soft-deleted in the database. Run `mas-cli config sync --prune` to delete it." + ), + n => warn!( + "{n} providers are soft-deleted in the database. Run `mas-cli config sync --prune` to delete them." 
+ ), + } + } + + for (index, provider) in upstream_oauth2_config.providers.into_iter().enumerate() { + if !provider.enabled { + continue; + } + + // Use the position in the config of the provider as position in the UI + let ui_order = index.try_into().unwrap_or(i32::MAX); + + let _span = info_span!("provider", %provider.id).entered(); + if existing_enabled_ids.contains(&provider.id) { + info!(provider.id = %provider.id, "Updating provider"); + } else if existing_disabled.contains_key(&provider.id) { + info!(provider.id = %provider.id, "Enabling and updating provider"); + } else { + info!(provider.id = %provider.id, "Adding provider"); + } + + if dry_run { + continue; + } + + let encrypted_client_secret = if let Some(client_secret) = provider.client_secret { + Some(encrypter.encrypt_to_string(client_secret.value().await?.as_bytes())?) + } else if let Some(mut siwa) = provider.sign_in_with_apple.clone() { + // if private key file is defined and not private key (raw), we populate the + // private key to hold the content of the private key file. + // private key (raw) takes precedence so both can be defined + // without issues + if siwa.private_key.is_none() + && let Some(private_key_file) = siwa.private_key_file.take() + { + let key = tokio::fs::read_to_string(private_key_file).await?; + siwa.private_key = Some(key); + } + let encoded = serde_json::to_vec(&siwa)?; + Some(encrypter.encrypt_to_string(&encoded)?) 
+ } else { + None + }; + + let discovery_mode = match provider.discovery_mode { + mas_config::UpstreamOAuth2DiscoveryMode::Oidc => { + mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc + } + mas_config::UpstreamOAuth2DiscoveryMode::Insecure => { + mas_data_model::UpstreamOAuthProviderDiscoveryMode::Insecure + } + mas_config::UpstreamOAuth2DiscoveryMode::Disabled => { + mas_data_model::UpstreamOAuthProviderDiscoveryMode::Disabled + } + }; + + let token_endpoint_auth_method = match provider.token_endpoint_auth_method { + mas_config::UpstreamOAuth2TokenAuthMethod::None => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::None + } + mas_config::UpstreamOAuth2TokenAuthMethod::ClientSecretBasic => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::ClientSecretBasic + } + mas_config::UpstreamOAuth2TokenAuthMethod::ClientSecretPost => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost + } + mas_config::UpstreamOAuth2TokenAuthMethod::ClientSecretJwt => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::ClientSecretJwt + } + mas_config::UpstreamOAuth2TokenAuthMethod::PrivateKeyJwt => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::PrivateKeyJwt + } + mas_config::UpstreamOAuth2TokenAuthMethod::SignInWithApple => { + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::SignInWithApple + } + }; + + let response_mode = provider + .response_mode + .map(|response_mode| match response_mode { + mas_config::UpstreamOAuth2ResponseMode::Query => { + mas_data_model::UpstreamOAuthProviderResponseMode::Query + } + mas_config::UpstreamOAuth2ResponseMode::FormPost => { + mas_data_model::UpstreamOAuthProviderResponseMode::FormPost + } + }); + + if discovery_mode.is_disabled() { + if provider.authorization_endpoint.is_none() { + error!(provider.id = %provider.id, "Provider has discovery disabled but no authorization endpoint set"); + } + + if provider.token_endpoint.is_none() { + error!(provider.id = %provider.id, "Provider has 
discovery disabled but no token endpoint set"); + } + + if provider.jwks_uri.is_none() { + warn!(provider.id = %provider.id, "Provider has discovery disabled but no JWKS URI set"); + } + } + + let pkce_mode = match provider.pkce_method { + mas_config::UpstreamOAuth2PkceMethod::Auto => { + mas_data_model::UpstreamOAuthProviderPkceMode::Auto + } + mas_config::UpstreamOAuth2PkceMethod::Always => { + mas_data_model::UpstreamOAuthProviderPkceMode::S256 + } + mas_config::UpstreamOAuth2PkceMethod::Never => { + mas_data_model::UpstreamOAuthProviderPkceMode::Disabled + } + }; + + let on_backchannel_logout = match provider.on_backchannel_logout { + mas_config::UpstreamOAuth2OnBackchannelLogout::DoNothing => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing + } + mas_config::UpstreamOAuth2OnBackchannelLogout::LogoutBrowserOnly => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly + } + mas_config::UpstreamOAuth2OnBackchannelLogout::LogoutAll => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::LogoutAll + } + }; + + repo.upstream_oauth_provider() + .upsert( + clock, + provider.id, + UpstreamOAuthProviderParams { + issuer: provider.issuer, + human_name: provider.human_name, + brand_name: provider.brand_name, + scope: provider.scope.parse()?, + token_endpoint_auth_method, + token_endpoint_signing_alg: provider.token_endpoint_auth_signing_alg, + id_token_signed_response_alg: provider.id_token_signed_response_alg, + client_id: provider.client_id, + encrypted_client_secret, + claims_imports: map_claims_imports(&provider.claims_imports), + token_endpoint_override: provider.token_endpoint, + userinfo_endpoint_override: provider.userinfo_endpoint, + authorization_endpoint_override: provider.authorization_endpoint, + jwks_uri_override: provider.jwks_uri, + discovery_mode, + pkce_mode, + fetch_userinfo: provider.fetch_userinfo, + userinfo_signed_response_alg: provider.userinfo_signed_response_alg, + response_mode, + 
additional_authorization_parameters: provider + .additional_authorization_parameters + .into_iter() + .collect(), + forward_login_hint: provider.forward_login_hint, + ui_order, + on_backchannel_logout, + }, + ) + .await?; + } + } + + { + let _span = info_span!("cli.config.sync.clients").entered(); + let config_ids = clients_config + .iter() + .map(|c| c.client_id) + .collect::>(); + + let existing = repo.oauth2_client().all_static().await?; + let existing_ids = existing.iter().map(|p| p.id).collect::>(); + let to_delete = existing.into_iter().filter(|p| !config_ids.contains(&p.id)); + if prune { + for client in to_delete { + info!(client.id = %client.client_id, "Deleting client"); + + if dry_run { + continue; + } + + repo.oauth2_client().delete(client).await?; + } + } else { + let len = to_delete.count(); + match len { + 0 => {} + 1 => warn!( + "A static client in the database is not in the config. Run with `--prune` to delete it." + ), + n => warn!( + "{n} static clients in the database are not in the config. Run with `--prune` to delete them." 
+ ), + } + } + + for client in clients_config { + let _span = info_span!("client", client.id = %client.client_id).entered(); + if existing_ids.contains(&client.client_id) { + info!(client.id = %client.client_id, "Updating client"); + } else { + info!(client.id = %client.client_id, "Adding client"); + } + + if dry_run { + continue; + } + + let client_secret = client.client_secret().await?; + let client_name = client.client_name.as_ref(); + let client_auth_method = client.client_auth_method(); + let jwks = client.jwks.as_ref(); + let jwks_uri = client.jwks_uri.as_ref(); + + // TODO: should be moved somewhere else + let encrypted_client_secret = client_secret + .map(|client_secret| encrypter.encrypt_to_string(client_secret.as_bytes())) + .transpose()?; + + repo.oauth2_client() + .upsert_static( + client.client_id, + client_name.cloned(), + client_auth_method, + encrypted_client_secret, + jwks.cloned(), + jwks_uri.cloned(), + client.redirect_uris, + ) + .await?; + } + } + + // Get the lock and release it to commit the transaction + let lock = repo.into_inner(); + let txn = lock.release_now().await?; + if dry_run { + info!("Dry run, rolling back changes"); + txn.rollback().await?; + } else { + txn.commit().await?; + } + Ok(()) +} diff --git a/matrix-authentication-service/crates/cli/src/telemetry.rs b/matrix-authentication-service/crates/cli/src/telemetry.rs new file mode 100644 index 00000000..101a00d5 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/telemetry.rs @@ -0,0 +1,293 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::{LazyLock, OnceLock}; + +use anyhow::Context as _; +use bytes::Bytes; +use http_body_util::Full; +use hyper::{Response, header::CONTENT_TYPE}; +use mas_config::{ + MetricsConfig, MetricsExporterKind, Propagator, TelemetryConfig, TracingConfig, + TracingExporterKind, +}; +use opentelemetry::{ + InstrumentationScope, KeyValue, + metrics::Meter, + propagation::{TextMapCompositePropagator, TextMapPropagator}, + trace::TracerProvider as _, +}; +use opentelemetry_otlp::{WithExportConfig, WithHttpConfig}; +use opentelemetry_prometheus_text_exporter::PrometheusExporter; +use opentelemetry_sdk::{ + Resource, + metrics::{ManualReader, SdkMeterProvider, periodic_reader_with_async_runtime::PeriodicReader}, + propagation::{BaggagePropagator, TraceContextPropagator}, + trace::{ + IdGenerator, Sampler, SdkTracerProvider, Tracer, + span_processor_with_async_runtime::BatchSpanProcessor, + }, +}; +use opentelemetry_semantic_conventions as semcov; + +static SCOPE: LazyLock = LazyLock::new(|| { + InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(semcov::SCHEMA_URL) + .build() +}); + +pub static METER: LazyLock = + LazyLock::new(|| opentelemetry::global::meter_with_scope(SCOPE.clone())); + +pub static TRACER: OnceLock = OnceLock::new(); +static METER_PROVIDER: OnceLock = OnceLock::new(); +static TRACER_PROVIDER: OnceLock = OnceLock::new(); +static PROMETHEUS_EXPORTER: OnceLock = OnceLock::new(); + +pub fn setup(config: &TelemetryConfig) -> anyhow::Result<()> { + let propagator = propagator(&config.tracing.propagators); + + // The CORS filter needs to know what headers it should whitelist for + // CORS-protected requests. 
+ mas_http::set_propagator(&propagator); + opentelemetry::global::set_text_map_propagator(propagator); + + init_tracer(&config.tracing).context("Failed to configure traces exporter")?; + init_meter(&config.metrics).context("Failed to configure metrics exporter")?; + + opentelemetry_instrumentation_process::init() + .context("Failed to configure process instrumentation")?; + opentelemetry_instrumentation_tokio::observe_current_runtime(); + + Ok(()) +} + +pub fn shutdown() -> opentelemetry_sdk::error::OTelSdkResult { + if let Some(tracer_provider) = TRACER_PROVIDER.get() { + tracer_provider.shutdown()?; + } + + if let Some(meter_provider) = METER_PROVIDER.get() { + meter_provider.shutdown()?; + } + + Ok(()) +} + +fn match_propagator(propagator: Propagator) -> Box { + use Propagator as P; + match propagator { + P::TraceContext => Box::new(TraceContextPropagator::new()), + P::Baggage => Box::new(BaggagePropagator::new()), + P::Jaeger => Box::new(opentelemetry_jaeger_propagator::Propagator::new()), + } +} + +fn propagator(propagators: &[Propagator]) -> TextMapCompositePropagator { + let propagators = propagators.iter().copied().map(match_propagator).collect(); + + TextMapCompositePropagator::new(propagators) +} + +/// An [`IdGenerator`] which always returns an invalid trace ID and span ID +/// +/// This is used when no exporter is being used, so that we don't log the trace +/// ID when we're not tracing. 
+#[derive(Debug, Clone, Copy)] +struct InvalidIdGenerator; +impl IdGenerator for InvalidIdGenerator { + fn new_trace_id(&self) -> opentelemetry::TraceId { + opentelemetry::TraceId::INVALID + } + fn new_span_id(&self) -> opentelemetry::SpanId { + opentelemetry::SpanId::INVALID + } +} + +fn init_tracer(config: &TracingConfig) -> anyhow::Result<()> { + let sample_rate = config.sample_rate.unwrap_or(1.0); + + // We sample traces based on the parent if we have one, and if not, we + // sample a ratio based on the configured sample rate + let sampler = Sampler::ParentBased(Box::new(Sampler::TraceIdRatioBased(sample_rate))); + + let tracer_provider_builder = SdkTracerProvider::builder() + .with_resource(resource()) + .with_sampler(sampler); + + let tracer_provider = match config.exporter { + TracingExporterKind::None => tracer_provider_builder + .with_id_generator(InvalidIdGenerator) + .with_sampler(Sampler::AlwaysOff) + .build(), + + TracingExporterKind::Stdout => { + let exporter = opentelemetry_stdout::SpanExporter::default(); + tracer_provider_builder + .with_simple_exporter(exporter) + .build() + } + + TracingExporterKind::Otlp => { + let mut exporter = opentelemetry_otlp::SpanExporter::builder() + .with_http() + .with_http_client(mas_http::reqwest_client()); + if let Some(endpoint) = &config.endpoint { + exporter = exporter.with_endpoint(endpoint.as_str()); + } + let exporter = exporter + .build() + .context("Failed to configure OTLP trace exporter")?; + + let batch_processor = + BatchSpanProcessor::builder(exporter, opentelemetry_sdk::runtime::Tokio).build(); + + tracer_provider_builder + .with_span_processor(batch_processor) + .build() + } + }; + + TRACER_PROVIDER + .set(tracer_provider.clone()) + .map_err(|_| anyhow::anyhow!("TRACER_PROVIDER was set twice"))?; + + let tracer = tracer_provider.tracer_with_scope(SCOPE.clone()); + TRACER + .set(tracer) + .map_err(|_| anyhow::anyhow!("TRACER was set twice"))?; + + 
opentelemetry::global::set_tracer_provider(tracer_provider); + + Ok(()) +} + +fn otlp_metric_reader( + endpoint: Option<&url::Url>, +) -> anyhow::Result> { + let mut exporter = opentelemetry_otlp::MetricExporter::builder() + .with_http() + .with_http_client(mas_http::reqwest_client()); + if let Some(endpoint) = endpoint { + exporter = exporter.with_endpoint(endpoint.to_string()); + } + let exporter = exporter + .build() + .context("Failed to configure OTLP metric exporter")?; + + let reader = PeriodicReader::builder(exporter, opentelemetry_sdk::runtime::Tokio).build(); + Ok(reader) +} + +fn stdout_metric_reader() -> PeriodicReader { + let exporter = opentelemetry_stdout::MetricExporter::builder().build(); + PeriodicReader::builder(exporter, opentelemetry_sdk::runtime::Tokio).build() +} + +type PromServiceFuture = + std::future::Ready>, std::convert::Infallible>>; + +#[allow(clippy::needless_pass_by_value)] +fn prometheus_service_fn(_req: T) -> PromServiceFuture { + let response = if let Some(exporter) = PROMETHEUS_EXPORTER.get() { + // We'll need some space for this, so we preallocate a bit + let mut buffer = Vec::with_capacity(1024); + + if let Err(err) = exporter.export(&mut buffer) { + tracing::error!( + error = &err as &dyn std::error::Error, + "Failed to export Prometheus metrics" + ); + + Response::builder() + .status(500) + .header(CONTENT_TYPE, "text/plain") + .body(Full::new(Bytes::from_static( + b"Failed to export Prometheus metrics, see logs for details", + ))) + .unwrap() + } else { + Response::builder() + .status(200) + .header(CONTENT_TYPE, "text/plain;version=1.0.0") + .body(Full::new(Bytes::from(buffer))) + .unwrap() + } + } else { + Response::builder() + .status(500) + .header(CONTENT_TYPE, "text/plain") + .body(Full::new(Bytes::from_static( + b"Prometheus exporter was not enabled in config", + ))) + .unwrap() + }; + + std::future::ready(Ok(response)) +} + +pub fn prometheus_service() -> tower::util::ServiceFn PromServiceFuture> { + if 
PROMETHEUS_EXPORTER.get().is_none() { + tracing::warn!( + "A Prometheus resource was mounted on a listener, but the Prometheus exporter was not setup in the config" + ); + } + + tower::service_fn(prometheus_service_fn as _) +} + +fn prometheus_metric_reader() -> anyhow::Result { + let exporter = PrometheusExporter::builder().without_scope_info().build(); + + PROMETHEUS_EXPORTER + .set(exporter.clone()) + .map_err(|_| anyhow::anyhow!("PROMETHEUS_EXPORTER was set twice"))?; + + Ok(exporter) +} + +fn init_meter(config: &MetricsConfig) -> anyhow::Result<()> { + let meter_provider_builder = SdkMeterProvider::builder(); + let meter_provider_builder = match config.exporter { + MetricsExporterKind::None => meter_provider_builder.with_reader(ManualReader::default()), + MetricsExporterKind::Stdout => meter_provider_builder.with_reader(stdout_metric_reader()), + MetricsExporterKind::Otlp => { + meter_provider_builder.with_reader(otlp_metric_reader(config.endpoint.as_ref())?) + } + MetricsExporterKind::Prometheus => { + meter_provider_builder.with_reader(prometheus_metric_reader()?) 
+ } + }; + + let meter_provider = meter_provider_builder.with_resource(resource()).build(); + + METER_PROVIDER + .set(meter_provider.clone()) + .map_err(|_| anyhow::anyhow!("METER_PROVIDER was set twice"))?; + opentelemetry::global::set_meter_provider(meter_provider.clone()); + + Ok(()) +} + +fn resource() -> Resource { + Resource::builder() + .with_service_name(env!("CARGO_PKG_NAME")) + .with_detectors(&[ + Box::new(opentelemetry_resource_detectors::HostResourceDetector::default()), + Box::new(opentelemetry_resource_detectors::OsResourceDetector), + Box::new(opentelemetry_resource_detectors::ProcessResourceDetector), + ]) + .with_attributes([ + KeyValue::new(semcov::resource::SERVICE_VERSION, crate::VERSION), + KeyValue::new(semcov::resource::PROCESS_RUNTIME_NAME, "rust"), + KeyValue::new( + semcov::resource::PROCESS_RUNTIME_VERSION, + env!("VERGEN_RUSTC_SEMVER"), + ), + ]) + .build() +} diff --git a/matrix-authentication-service/crates/cli/src/util.rs b/matrix-authentication-service/crates/cli/src/util.rs new file mode 100644 index 00000000..45427615 --- /dev/null +++ b/matrix-authentication-service/crates/cli/src/util.rs @@ -0,0 +1,592 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{sync::Arc, time::Duration}; + +use anyhow::Context; +use mas_config::{ + AccountConfig, BrandingConfig, CaptchaConfig, DatabaseConfig, EmailConfig, EmailSmtpMode, + EmailTransportKind, ExperimentalConfig, HomeserverKind, MatrixConfig, PasswordsConfig, + PolicyConfig, TemplatesConfig, +}; +use mas_context::LogContext; +use mas_data_model::{SessionExpirationConfig, SessionLimitConfig, SiteConfig}; +use mas_email::{MailTransport, Mailer}; +use mas_handlers::passwords::PasswordManager; +use mas_matrix::{HomeserverConnection, ReadOnlyHomeserverConnection}; +use mas_matrix_synapse::{LegacySynapseConnection, SynapseConnection}; +use mas_policy::PolicyFactory; +use mas_router::UrlBuilder; +use mas_storage::{BoxRepositoryFactory, RepositoryAccess, RepositoryFactory}; +use mas_templates::{SiteConfigExt, Templates}; +use sqlx::{ + ConnectOptions, Executor, PgConnection, PgPool, + postgres::{PgConnectOptions, PgPoolOptions}, +}; +use tokio_util::{sync::CancellationToken, task::TaskTracker}; +use tracing::{Instrument, log::LevelFilter}; + +pub async fn password_manager_from_config( + config: &PasswordsConfig, +) -> Result { + if !config.enabled() { + return Ok(PasswordManager::disabled()); + } + + let schemes = config.load().await?.into_iter().map( + |(version, algorithm, cost, secret, unicode_normalization)| { + use mas_handlers::passwords::Hasher; + let hasher = match algorithm { + mas_config::PasswordAlgorithm::Pbkdf2 => { + Hasher::pbkdf2(secret, unicode_normalization) + } + mas_config::PasswordAlgorithm::Bcrypt => { + Hasher::bcrypt(cost, secret, unicode_normalization) + } + mas_config::PasswordAlgorithm::Argon2id => { + Hasher::argon2id(secret, unicode_normalization) + } + }; + + (version, hasher) + }, + ); + + PasswordManager::new(config.minimum_complexity(), schemes) +} + +pub fn mailer_from_config( + config: &EmailConfig, + templates: &Templates, +) -> Result { + let from = config + .from + .parse() + .context("invalid email configuration: invalid 'from' 
address")?; + let reply_to = config + .reply_to + .parse() + .context("invalid email configuration: invalid 'reply_to' address")?; + let transport = match config.transport() { + EmailTransportKind::Blackhole => MailTransport::blackhole(), + EmailTransportKind::Smtp => { + // This should have been set ahead of time + let hostname = config + .hostname() + .context("invalid email configuration: missing hostname")?; + + let mode = config + .mode() + .context("invalid email configuration: missing mode")?; + + let credentials = match (config.username(), config.password()) { + (Some(username), Some(password)) => Some(mas_email::SmtpCredentials::new( + username.to_owned(), + password.to_owned(), + )), + (None, None) => None, + _ => { + anyhow::bail!("invalid email configuration: missing username or password"); + } + }; + + let mode = match mode { + EmailSmtpMode::Plain => mas_email::SmtpMode::Plain, + EmailSmtpMode::StartTls => mas_email::SmtpMode::StartTls, + EmailSmtpMode::Tls => mas_email::SmtpMode::Tls, + }; + + MailTransport::smtp(mode, hostname, config.port(), credentials) + .context("failed to build SMTP transport")? + } + EmailTransportKind::Sendmail => MailTransport::sendmail(config.command()), + }; + + Ok(Mailer::new(templates.clone(), transport, from, reply_to)) +} + +/// Test the connection to the mailer in a background task +pub fn test_mailer_in_background(mailer: &Mailer, timeout: Duration) { + let mailer = mailer.clone(); + + let span = tracing::info_span!("cli.test_mailer"); + tokio::spawn( + LogContext::new("mailer-test").run(async move || { + match tokio::time::timeout(timeout, mailer.test_connection()).await { + Ok(Ok(())) => {} + Ok(Err(err)) => { + tracing::warn!( + error = &err as &dyn std::error::Error, + "Could not connect to the mail backend, tasks sending mails may fail!" 
+ ); + } + Err(_) => { + tracing::warn!("Timed out while testing the mail backend connection, tasks sending mails may fail!"); + } + } + }) + .instrument(span) + ); +} + +pub async fn policy_factory_from_config( + config: &PolicyConfig, + matrix_config: &MatrixConfig, + experimental_config: &ExperimentalConfig, +) -> Result { + let policy_file = tokio::fs::File::open(&config.wasm_module) + .await + .context("failed to open OPA WASM policy file")?; + + let entrypoints = mas_policy::Entrypoints { + register: config.register_entrypoint.clone(), + client_registration: config.client_registration_entrypoint.clone(), + authorization_grant: config.authorization_grant_entrypoint.clone(), + compat_login: config.compat_login_entrypoint.clone(), + email: config.email_entrypoint.clone(), + }; + + let session_limit_config = + experimental_config + .session_limit + .as_ref() + .map(|c| SessionLimitConfig { + soft_limit: c.soft_limit, + hard_limit: c.hard_limit, + }); + + let data = mas_policy::Data::new(matrix_config.homeserver.clone(), session_limit_config) + .with_rest(config.data.clone()); + + PolicyFactory::load(policy_file, data, entrypoints) + .await + .context("failed to load the policy") +} + +pub fn captcha_config_from_config( + captcha_config: &CaptchaConfig, +) -> Result, anyhow::Error> { + let Some(service) = captcha_config.service else { + return Ok(None); + }; + + let service = match service { + mas_config::CaptchaServiceKind::RecaptchaV2 => mas_data_model::CaptchaService::RecaptchaV2, + mas_config::CaptchaServiceKind::CloudflareTurnstile => { + mas_data_model::CaptchaService::CloudflareTurnstile + } + mas_config::CaptchaServiceKind::HCaptcha => mas_data_model::CaptchaService::HCaptcha, + }; + + Ok(Some(mas_data_model::CaptchaConfig { + service, + site_key: captcha_config + .site_key + .clone() + .context("missing site key")?, + secret_key: captcha_config + .secret_key + .clone() + .context("missing secret key")?, + })) +} + +pub fn site_config_from_config( + 
branding_config: &BrandingConfig, + matrix_config: &MatrixConfig, + experimental_config: &ExperimentalConfig, + password_config: &PasswordsConfig, + account_config: &AccountConfig, + captcha_config: &CaptchaConfig, +) -> Result { + let captcha = captcha_config_from_config(captcha_config)?; + let session_expiration = experimental_config + .inactive_session_expiration + .as_ref() + .map(|c| SessionExpirationConfig { + oauth_session_inactivity_ttl: c.expire_oauth_sessions.then_some(c.ttl), + compat_session_inactivity_ttl: c.expire_compat_sessions.then_some(c.ttl), + user_session_inactivity_ttl: c.expire_user_sessions.then_some(c.ttl), + }); + + Ok(SiteConfig { + access_token_ttl: experimental_config.access_token_ttl, + compat_token_ttl: experimental_config.compat_token_ttl, + server_name: matrix_config.homeserver.clone(), + policy_uri: branding_config.policy_uri.clone(), + tos_uri: branding_config.tos_uri.clone(), + imprint: branding_config.imprint.clone(), + password_login_enabled: password_config.enabled(), + password_registration_enabled: password_config.enabled() + && account_config.password_registration_enabled, + password_registration_email_required: account_config.password_registration_email_required, + registration_token_required: account_config.registration_token_required, + email_change_allowed: account_config.email_change_allowed, + displayname_change_allowed: account_config.displayname_change_allowed, + password_change_allowed: password_config.enabled() + && account_config.password_change_allowed, + account_recovery_allowed: password_config.enabled() + && account_config.password_recovery_enabled, + account_deactivation_allowed: account_config.account_deactivation_allowed, + captcha, + minimum_password_complexity: password_config.minimum_complexity(), + session_expiration, + login_with_email_allowed: account_config.login_with_email_allowed, + plan_management_iframe_uri: experimental_config.plan_management_iframe_uri.clone(), + session_limit: 
experimental_config + .session_limit + .as_ref() + .map(|c| SessionLimitConfig { + soft_limit: c.soft_limit, + hard_limit: c.hard_limit, + }), + }) +} + +pub async fn templates_from_config( + config: &TemplatesConfig, + site_config: &SiteConfig, + url_builder: &UrlBuilder, + strict: bool, + stabilise: bool, +) -> Result { + Templates::load( + config.path.clone(), + url_builder.clone(), + (!stabilise).then(|| config.assets_manifest.clone()), + config.translations_path.clone(), + site_config.templates_branding(), + site_config.templates_features(), + strict, + ) + .await + .with_context(|| format!("Failed to load the templates at {}", config.path)) +} + +fn database_connect_options_from_config( + config: &DatabaseConfig, + opts: &DatabaseConnectOptions, +) -> Result { + let options = if let Some(uri) = config.uri.as_deref() { + uri.parse() + .context("could not parse database connection string")? + } else { + let mut opts = PgConnectOptions::new().application_name("matrix-authentication-service"); + + if let Some(host) = config.host.as_deref() { + opts = opts.host(host); + } + + if let Some(port) = config.port { + opts = opts.port(port); + } + + if let Some(socket) = config.socket.as_deref() { + opts = opts.socket(socket); + } + + if let Some(username) = config.username.as_deref() { + opts = opts.username(username); + } + + if let Some(password) = config.password.as_deref() { + opts = opts.password(password); + } + + if let Some(database) = config.database.as_deref() { + opts = opts.database(database); + } + + opts + }; + + let options = match (config.ssl_ca.as_deref(), config.ssl_ca_file.as_deref()) { + (None, None) => options, + (Some(pem), None) => options.ssl_root_cert_from_pem(pem.as_bytes().to_owned()), + (None, Some(path)) => options.ssl_root_cert(path), + (Some(_), Some(_)) => { + anyhow::bail!("invalid database configuration: both `ssl_ca` and `ssl_ca_file` are set") + } + }; + + let options = match ( + config.ssl_certificate.as_deref(), + 
config.ssl_certificate_file.as_deref(), + ) { + (None, None) => options, + (Some(pem), None) => options.ssl_client_cert_from_pem(pem.as_bytes()), + (None, Some(path)) => options.ssl_client_cert(path), + (Some(_), Some(_)) => { + anyhow::bail!( + "invalid database configuration: both `ssl_certificate` and `ssl_certificate_file` are set" + ) + } + }; + + let options = match (config.ssl_key.as_deref(), config.ssl_key_file.as_deref()) { + (None, None) => options, + (Some(pem), None) => options.ssl_client_key_from_pem(pem.as_bytes()), + (None, Some(path)) => options.ssl_client_key(path), + (Some(_), Some(_)) => { + anyhow::bail!( + "invalid database configuration: both `ssl_key` and `ssl_key_file` are set" + ) + } + }; + + let options = match &config.ssl_mode { + Some(ssl_mode) => { + let ssl_mode = match ssl_mode { + mas_config::PgSslMode::Disable => sqlx::postgres::PgSslMode::Disable, + mas_config::PgSslMode::Allow => sqlx::postgres::PgSslMode::Allow, + mas_config::PgSslMode::Prefer => sqlx::postgres::PgSslMode::Prefer, + mas_config::PgSslMode::Require => sqlx::postgres::PgSslMode::Require, + mas_config::PgSslMode::VerifyCa => sqlx::postgres::PgSslMode::VerifyCa, + mas_config::PgSslMode::VerifyFull => sqlx::postgres::PgSslMode::VerifyFull, + }; + + options.ssl_mode(ssl_mode) + } + None => options, + }; + + let mut options = options.log_statements(LevelFilter::Debug); + + if opts.log_slow_statements { + options = options.log_slow_statements(LevelFilter::Warn, Duration::from_millis(100)); + } + + Ok(options) +} + +/// Create a database connection pool from the configuration +#[tracing::instrument(name = "db.connect", skip_all)] +pub async fn database_pool_from_config(config: &DatabaseConfig) -> Result { + let options = database_connect_options_from_config(config, &DatabaseConnectOptions::default())?; + PgPoolOptions::new() + .max_connections(config.max_connections.into()) + .min_connections(config.min_connections) + .acquire_timeout(config.connect_timeout) + 
.idle_timeout(config.idle_timeout) + .max_lifetime(config.max_lifetime) + .after_connect(|conn, _meta| { + Box::pin(async move { + // Unlisten from all channels, as we might be connected via a connection pooler + // that doesn't clean up LISTEN/NOTIFY state when reusing connections. + conn.execute("UNLISTEN *;").await?; + + Ok(()) + }) + }) + .connect_with(options) + .await + .context("could not connect to the database") +} + +pub struct DatabaseConnectOptions { + pub log_slow_statements: bool, +} + +impl Default for DatabaseConnectOptions { + fn default() -> Self { + Self { + log_slow_statements: true, + } + } +} + +/// Create a single database connection from the configuration +#[tracing::instrument(name = "db.connect", skip_all)] +pub async fn database_connection_from_config( + config: &DatabaseConfig, +) -> Result { + database_connect_options_from_config(config, &DatabaseConnectOptions::default())? + .connect() + .await + .context("could not connect to the database") +} + +/// Create a single database connection from the configuration, +/// with specific options. +#[tracing::instrument(name = "db.connect", skip_all)] +pub async fn database_connection_from_config_with_options( + config: &DatabaseConfig, + options: &DatabaseConnectOptions, +) -> Result { + database_connect_options_from_config(config, options)? + .connect() + .await + .context("could not connect to the database") +} + +/// Update the policy factory dynamic data from the database and spawn a task to +/// periodically update it +// XXX: this could be put somewhere else? 
+pub async fn load_policy_factory_dynamic_data_continuously( + policy_factory: &Arc, + repository_factory: BoxRepositoryFactory, + cancellation_token: CancellationToken, + task_tracker: &TaskTracker, +) -> Result<(), anyhow::Error> { + let policy_factory = policy_factory.clone(); + + load_policy_factory_dynamic_data(&policy_factory, &*repository_factory).await?; + + task_tracker.spawn(async move { + let mut interval = tokio::time::interval(Duration::from_secs(60)); + + loop { + tokio::select! { + () = cancellation_token.cancelled() => { + return; + } + _ = interval.tick() => {} + } + + if let Err(err) = + load_policy_factory_dynamic_data(&policy_factory, &*repository_factory).await + { + tracing::error!( + error = ?err, + "Failed to load policy factory dynamic data" + ); + cancellation_token.cancel(); + return; + } + } + }); + + Ok(()) +} + +/// Update the policy factory dynamic data from the database +#[tracing::instrument(name = "policy.load_dynamic_data", skip_all)] +pub async fn load_policy_factory_dynamic_data( + policy_factory: &PolicyFactory, + repository_factory: &(dyn RepositoryFactory + Send + Sync), +) -> Result<(), anyhow::Error> { + let mut repo = repository_factory + .create() + .await + .context("Failed to acquire database connection")?; + + if let Some(data) = repo.policy_data().get().await? 
{ + let id = data.id; + let updated = policy_factory.set_dynamic_data(data).await?; + if updated { + tracing::info!(policy_data.id = %id, "Loaded dynamic policy data from the database"); + } + } + + Ok(()) +} + +/// Create a clonable, type-erased [`HomeserverConnection`] from the +/// configuration +pub async fn homeserver_connection_from_config( + config: &MatrixConfig, + http_client: reqwest::Client, +) -> anyhow::Result> { + Ok(match config.kind { + HomeserverKind::Synapse | HomeserverKind::SynapseModern => { + Arc::new(SynapseConnection::new( + config.homeserver.clone(), + config.endpoint.clone(), + config.secret().await?, + http_client, + )) + } + HomeserverKind::SynapseLegacy => Arc::new(LegacySynapseConnection::new( + config.homeserver.clone(), + config.endpoint.clone(), + config.secret().await?, + http_client, + )), + HomeserverKind::SynapseReadOnly => { + let connection = SynapseConnection::new( + config.homeserver.clone(), + config.endpoint.clone(), + config.secret().await?, + http_client, + ); + let readonly = ReadOnlyHomeserverConnection::new(connection); + Arc::new(readonly) + } + }) +} + +#[cfg(test)] +mod tests { + use rand::SeedableRng; + use zeroize::Zeroizing; + + use super::*; + + #[tokio::test] + async fn test_password_manager_from_config() { + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + let password = Zeroizing::new("hunter2".to_owned()); + + // Test a valid, enabled config + let config = serde_json::from_value(serde_json::json!({ + "schemes": [{ + "version": 42, + "algorithm": "argon2id" + }, { + "version": 10, + "algorithm": "bcrypt" + }] + })) + .unwrap(); + + let manager = password_manager_from_config(&config).await; + assert!(manager.is_ok()); + let manager = manager.unwrap(); + assert!(manager.is_enabled()); + let hashed = manager.hash(&mut rng, password.clone()).await; + assert!(hashed.is_ok()); + let (version, hashed) = hashed.unwrap(); + assert_eq!(version, 42); + assert!(hashed.starts_with("$argon2id$")); + + // Test a 
valid, disabled config + let config = serde_json::from_value(serde_json::json!({ + "enabled": false, + "schemes": [] + })) + .unwrap(); + + let manager = password_manager_from_config(&config).await; + assert!(manager.is_ok()); + let manager = manager.unwrap(); + assert!(!manager.is_enabled()); + let res = manager.hash(&mut rng, password.clone()).await; + assert!(res.is_err()); + + // Test an invalid config + // Repeat the same version twice + let config = serde_json::from_value(serde_json::json!({ + "schemes": [{ + "version": 42, + "algorithm": "argon2id" + }, { + "version": 42, + "algorithm": "bcrypt" + }] + })) + .unwrap(); + let manager = password_manager_from_config(&config).await; + assert!(manager.is_err()); + + // Empty schemes + let config = serde_json::from_value(serde_json::json!({ + "schemes": [] + })) + .unwrap(); + let manager = password_manager_from_config(&config).await; + assert!(manager.is_err()); + } +} diff --git a/matrix-authentication-service/crates/config/Cargo.toml b/matrix-authentication-service/crates/config/Cargo.toml new file mode 100644 index 00000000..4882647a --- /dev/null +++ b/matrix-authentication-service/crates/config/Cargo.toml @@ -0,0 +1,53 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-config" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +camino.workspace = true +chrono.workspace = true +figment.workspace = true +futures-util.workspace = true +governor.workspace = true +hex.workspace = true +indoc.workspace = true +ipnetwork.workspace = true +lettre.workspace = true +pem-rfc7468.workspace = true +rand_chacha.workspace = true +rand.workspace = true +rustls-pki-types.workspace = true +schemars.workspace = true +serde_json.workspace = true +serde_with.workspace = true +serde.workspace = true +tokio.workspace = true +tracing.workspace = true +ulid.workspace = true +url.workspace = true + +mas-jose.workspace = true +mas-keystore.workspace = true +mas-iana.workspace = true + +[features] +docker = [] +dist = [] + +[[bin]] +name = "schema" +doc = false diff --git a/matrix-authentication-service/crates/config/src/bin/schema.rs b/matrix-authentication-service/crates/config/src/bin/schema.rs new file mode 100644 index 00000000..db5ac230 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/bin/schema.rs @@ -0,0 +1,14 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use schemars::generate::SchemaSettings; + +fn main() { + let generator = SchemaSettings::draft07().into_generator(); + let schema = generator.into_root_schema_for::(); + + serde_json::to_writer_pretty(std::io::stdout(), &schema).expect("Failed to serialize schema"); +} diff --git a/matrix-authentication-service/crates/config/src/lib.rs b/matrix-authentication-service/crates/config/src/lib.rs new file mode 100644 index 00000000..cdf68e42 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/lib.rs @@ -0,0 +1,24 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(missing_docs, rustdoc::missing_crate_level_docs)] +#![allow(clippy::module_name_repetitions)] +// derive(JSONSchema) uses &str.to_string() +#![allow(clippy::str_to_string)] + +//! Application configuration logic + +#[cfg(all(feature = "docker", feature = "dist"))] +compile_error!("Only one of the `docker` and `dist` features can be enabled at once"); + +pub(crate) mod schema; +mod sections; +pub(crate) mod util; + +pub use self::{ + sections::*, + util::{ConfigurationSection, ConfigurationSectionExt}, +}; diff --git a/matrix-authentication-service/crates/config/src/schema.rs b/matrix-authentication-service/crates/config/src/schema.rs new file mode 100644 index 00000000..7c1761e3 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/schema.rs @@ -0,0 +1,27 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Useful JSON Schema definitions + +use std::borrow::Cow; + +use schemars::{JsonSchema, Schema, SchemaGenerator, json_schema}; + +/// A network hostname +pub struct Hostname; + +impl JsonSchema for Hostname { + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("Hostname") + } + + fn json_schema(_generator: &mut SchemaGenerator) -> Schema { + json_schema!({ + "type": "string", + "format": "hostname", + }) + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/account.rs b/matrix-authentication-service/crates/config/src/sections/account.rs new file mode 100644 index 00000000..2b6538a2 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/account.rs @@ -0,0 +1,125 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use crate::ConfigurationSection; + +const fn default_true() -> bool { + true +} + +#[allow(clippy::trivially_copy_pass_by_ref)] +const fn is_default_true(value: &bool) -> bool { + *value == default_true() +} + +const fn default_false() -> bool { + false +} + +#[allow(clippy::trivially_copy_pass_by_ref)] +const fn is_default_false(value: &bool) -> bool { + *value == default_false() +} + +/// Configuration section to configure features related to account management +#[allow(clippy::struct_excessive_bools)] +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize)] +pub struct AccountConfig { + /// Whether users are allowed to change their email addresses. Defaults to + /// `true`. + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub email_change_allowed: bool, + + /// Whether users are allowed to change their display names. Defaults to + /// `true`. 
+ /// + /// This should be in sync with the policy in the homeserver configuration. + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub displayname_change_allowed: bool, + + /// Whether to enable self-service password registration. Defaults to + /// `false` if password authentication is enabled. + /// + /// This has no effect if password login is disabled. + #[serde(default = "default_false", skip_serializing_if = "is_default_false")] + pub password_registration_enabled: bool, + + /// Whether self-service password registrations require a valid email. + /// Defaults to `true`. + /// + /// This has no effect if password registration is disabled. + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub password_registration_email_required: bool, + + /// Whether users are allowed to change their passwords. Defaults to `true`. + /// + /// This has no effect if password login is disabled. + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub password_change_allowed: bool, + + /// Whether email-based password recovery is enabled. Defaults to `false`. + /// + /// This has no effect if password login is disabled. + #[serde(default = "default_false", skip_serializing_if = "is_default_false")] + pub password_recovery_enabled: bool, + + /// Whether users are allowed to delete their own account. Defaults to + /// `true`. + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub account_deactivation_allowed: bool, + + /// Whether users can log in with their email address. Defaults to `false`. + /// + /// This has no effect if password login is disabled. + #[serde(default = "default_false", skip_serializing_if = "is_default_false")] + pub login_with_email_allowed: bool, + + /// Whether registration tokens are required for password registrations. + /// Defaults to `false`. 
+ /// + /// When enabled, users must provide a valid registration token during + /// password registration. This has no effect if password registration + /// is disabled. + #[serde(default = "default_false", skip_serializing_if = "is_default_false")] + pub registration_token_required: bool, +} + +impl Default for AccountConfig { + fn default() -> Self { + Self { + email_change_allowed: default_true(), + displayname_change_allowed: default_true(), + password_registration_enabled: default_false(), + password_registration_email_required: default_true(), + password_change_allowed: default_true(), + password_recovery_enabled: default_false(), + account_deactivation_allowed: default_true(), + login_with_email_allowed: default_false(), + registration_token_required: default_false(), + } + } +} + +impl AccountConfig { + /// Returns true if the configuration is the default one + pub(crate) fn is_default(&self) -> bool { + is_default_false(&self.password_registration_enabled) + && is_default_true(&self.email_change_allowed) + && is_default_true(&self.displayname_change_allowed) + && is_default_true(&self.password_change_allowed) + && is_default_false(&self.password_recovery_enabled) + && is_default_true(&self.account_deactivation_allowed) + && is_default_false(&self.login_with_email_allowed) + && is_default_false(&self.registration_token_required) + } +} + +impl ConfigurationSection for AccountConfig { + const PATH: Option<&'static str> = Some("account"); +} diff --git a/matrix-authentication-service/crates/config/src/sections/branding.rs b/matrix-authentication-service/crates/config/src/sections/branding.rs new file mode 100644 index 00000000..ec36f161 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/branding.rs @@ -0,0 +1,55 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use url::Url; + +use crate::ConfigurationSection; + +/// Configuration section for tweaking the branding of the service +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize, Default)] +pub struct BrandingConfig { + /// A human-readable name. Defaults to the server's address. + #[serde(skip_serializing_if = "Option::is_none")] + pub service_name: Option, + + /// Link to a privacy policy, displayed in the footer of web pages and + /// emails. It is also advertised to clients through the `op_policy_uri` + /// OIDC provider metadata. + #[serde(skip_serializing_if = "Option::is_none")] + pub policy_uri: Option, + + /// Link to a terms of service document, displayed in the footer of web + /// pages and emails. It is also advertised to clients through the + /// `op_tos_uri` OIDC provider metadata. + #[serde(skip_serializing_if = "Option::is_none")] + pub tos_uri: Option, + + /// Legal imprint, displayed in the footer in the footer of web pages and + /// emails. + #[serde(skip_serializing_if = "Option::is_none")] + pub imprint: Option, + + /// Logo displayed in some web pages. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub logo_uri: Option, +} + +impl BrandingConfig { + /// Returns true if the configuration is the default one + pub(crate) fn is_default(&self) -> bool { + self.service_name.is_none() + && self.policy_uri.is_none() + && self.tos_uri.is_none() + && self.imprint.is_none() + && self.logo_uri.is_none() + } +} + +impl ConfigurationSection for BrandingConfig { + const PATH: Option<&'static str> = Some("branding"); +} diff --git a/matrix-authentication-service/crates/config/src/sections/captcha.rs b/matrix-authentication-service/crates/config/src/sections/captcha.rs new file mode 100644 index 00000000..962d1f34 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/captcha.rs @@ -0,0 +1,83 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de::Error}; + +use crate::ConfigurationSection; + +/// Which service should be used for CAPTCHA protection +#[derive(Clone, Copy, Debug, Deserialize, JsonSchema, Serialize)] +pub enum CaptchaServiceKind { + /// Use Google's reCAPTCHA v2 API + #[serde(rename = "recaptcha_v2")] + RecaptchaV2, + + /// Use Cloudflare Turnstile + #[serde(rename = "cloudflare_turnstile")] + CloudflareTurnstile, + + /// Use ``HCaptcha`` + #[serde(rename = "hcaptcha")] + HCaptcha, +} + +/// Configuration section to setup CAPTCHA protection on a few operations +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize, Default)] +pub struct CaptchaConfig { + /// Which service should be used for CAPTCHA protection + #[serde(skip_serializing_if = "Option::is_none")] + pub service: Option, + + /// The site key to use + #[serde(skip_serializing_if = "Option::is_none")] + pub site_key: Option, + + /// The secret key to use + #[serde(skip_serializing_if = "Option::is_none")] + pub secret_key: Option, +} + +impl CaptchaConfig { + /// Returns true if the configuration is the default one + pub(crate) fn is_default(&self) -> bool { + self.service.is_none() && self.site_key.is_none() && self.secret_key.is_none() + } +} + +impl ConfigurationSection for CaptchaConfig { + const PATH: Option<&'static str> = Some("captcha"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + let metadata = figment.find_metadata(Self::PATH.unwrap()); + + let error_on_field = |mut error: figment::error::Error, field: &'static str| { + error.metadata = metadata.cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![Self::PATH.unwrap().to_owned(), field.to_owned()]; + error + }; + + let missing_field = |field: &'static str| { + error_on_field(figment::error::Error::missing_field(field), field) + }; + + if let Some(CaptchaServiceKind::RecaptchaV2) = self.service { + if self.site_key.is_none() { + return 
Err(missing_field("site_key").into()); + } + + if self.secret_key.is_none() { + return Err(missing_field("secret_key").into()); + } + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/clients.rs b/matrix-authentication-service/crates/config/src/sections/clients.rs new file mode 100644 index 00000000..2387d642 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/clients.rs @@ -0,0 +1,353 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::ops::Deref; + +use mas_iana::oauth::OAuthClientAuthenticationMethod; +use mas_jose::jwk::PublicJsonWebKeySet; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de::Error}; +use serde_with::serde_as; +use ulid::Ulid; +use url::Url; + +use super::{ClientSecret, ClientSecretRaw, ConfigurationSection}; + +/// Authentication method used by clients +#[derive(JsonSchema, Serialize, Deserialize, Copy, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub enum ClientAuthMethodConfig { + /// `none`: No authentication + None, + + /// `client_secret_basic`: `client_id` and `client_secret` used as basic + /// authorization credentials + ClientSecretBasic, + + /// `client_secret_post`: `client_id` and `client_secret` sent in the + /// request body + ClientSecretPost, + + /// `client_secret_basic`: a `client_assertion` sent in the request body and + /// signed using the `client_secret` + ClientSecretJwt, + + /// `client_secret_basic`: a `client_assertion` sent in the request body and + /// signed by an asymmetric key + PrivateKeyJwt, +} + +impl std::fmt::Display for ClientAuthMethodConfig { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ClientAuthMethodConfig::None => write!(f, "none"), + 
ClientAuthMethodConfig::ClientSecretBasic => write!(f, "client_secret_basic"), + ClientAuthMethodConfig::ClientSecretPost => write!(f, "client_secret_post"), + ClientAuthMethodConfig::ClientSecretJwt => write!(f, "client_secret_jwt"), + ClientAuthMethodConfig::PrivateKeyJwt => write!(f, "private_key_jwt"), + } + } +} + +/// An OAuth 2.0 client configuration +#[serde_as] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct ClientConfig { + /// The client ID + #[schemars( + with = "String", + regex(pattern = r"^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$"), + description = "A ULID as per https://github.com/ulid/spec" + )] + pub client_id: Ulid, + + /// Authentication method used for this client + client_auth_method: ClientAuthMethodConfig, + + /// Name of the `OAuth2` client + #[serde(skip_serializing_if = "Option::is_none")] + pub client_name: Option, + + /// The client secret, used by the `client_secret_basic`, + /// `client_secret_post` and `client_secret_jwt` authentication methods + #[schemars(with = "ClientSecretRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + pub client_secret: Option, + + /// The JSON Web Key Set (JWKS) used by the `private_key_jwt` authentication + /// method. Mutually exclusive with `jwks_uri` + #[serde(skip_serializing_if = "Option::is_none")] + pub jwks: Option, + + /// The URL of the JSON Web Key Set (JWKS) used by the `private_key_jwt` + /// authentication method. 
Mutually exclusive with `jwks` + #[serde(skip_serializing_if = "Option::is_none")] + pub jwks_uri: Option, + + /// List of allowed redirect URIs + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub redirect_uris: Vec, +} + +impl ClientConfig { + fn validate(&self) -> Result<(), Box> { + let auth_method = self.client_auth_method; + match self.client_auth_method { + ClientAuthMethodConfig::PrivateKeyJwt => { + if self.jwks.is_none() && self.jwks_uri.is_none() { + let error = figment::error::Error::custom( + "jwks or jwks_uri is required for private_key_jwt", + ); + return Err(Box::new(error.with_path("client_auth_method"))); + } + + if self.jwks.is_some() && self.jwks_uri.is_some() { + let error = + figment::error::Error::custom("jwks and jwks_uri are mutually exclusive"); + return Err(Box::new(error.with_path("jwks"))); + } + + if self.client_secret.is_some() { + let error = figment::error::Error::custom( + "client_secret is not allowed with private_key_jwt", + ); + return Err(Box::new(error.with_path("client_secret"))); + } + } + + ClientAuthMethodConfig::ClientSecretPost + | ClientAuthMethodConfig::ClientSecretBasic + | ClientAuthMethodConfig::ClientSecretJwt => { + if self.client_secret.is_none() { + let error = figment::error::Error::custom(format!( + "client_secret is required for {auth_method}" + )); + return Err(Box::new(error.with_path("client_auth_method"))); + } + + if self.jwks.is_some() { + let error = figment::error::Error::custom(format!( + "jwks is not allowed with {auth_method}" + )); + return Err(Box::new(error.with_path("jwks"))); + } + + if self.jwks_uri.is_some() { + let error = figment::error::Error::custom(format!( + "jwks_uri is not allowed with {auth_method}" + )); + return Err(Box::new(error.with_path("jwks_uri"))); + } + } + + ClientAuthMethodConfig::None => { + if self.client_secret.is_some() { + let error = figment::error::Error::custom( + "client_secret is not allowed with none authentication method", + ); + return 
Err(Box::new(error.with_path("client_secret"))); + } + + if self.jwks.is_some() { + let error = figment::error::Error::custom( + "jwks is not allowed with none authentication method", + ); + return Err(Box::new(error)); + } + + if self.jwks_uri.is_some() { + let error = figment::error::Error::custom( + "jwks_uri is not allowed with none authentication method", + ); + return Err(Box::new(error)); + } + } + } + + Ok(()) + } + + /// Authentication method used for this client + #[must_use] + pub fn client_auth_method(&self) -> OAuthClientAuthenticationMethod { + match self.client_auth_method { + ClientAuthMethodConfig::None => OAuthClientAuthenticationMethod::None, + ClientAuthMethodConfig::ClientSecretBasic => { + OAuthClientAuthenticationMethod::ClientSecretBasic + } + ClientAuthMethodConfig::ClientSecretPost => { + OAuthClientAuthenticationMethod::ClientSecretPost + } + ClientAuthMethodConfig::ClientSecretJwt => { + OAuthClientAuthenticationMethod::ClientSecretJwt + } + ClientAuthMethodConfig::PrivateKeyJwt => OAuthClientAuthenticationMethod::PrivateKeyJwt, + } + } + + /// Returns the client secret. + /// + /// If `client_secret_file` was given, the secret is read from that file. + /// + /// # Errors + /// + /// Returns an error when the client secret could not be read from file. 
+ pub async fn client_secret(&self) -> anyhow::Result> { + Ok(match &self.client_secret { + Some(client_secret) => Some(client_secret.value().await?), + None => None, + }) + } +} + +/// List of OAuth 2.0/OIDC clients config +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[serde(transparent)] +pub struct ClientsConfig(#[schemars(with = "Vec::")] Vec); + +impl ClientsConfig { + /// Returns true if all fields are at their default values + pub(crate) fn is_default(&self) -> bool { + self.0.is_empty() + } +} + +impl Deref for ClientsConfig { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl IntoIterator for ClientsConfig { + type Item = ClientConfig; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl ConfigurationSection for ClientsConfig { + const PATH: Option<&'static str> = Some("clients"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + for (index, client) in self.0.iter().enumerate() { + client.validate().map_err(|mut err| { + // Save the error location information in the error + err.metadata = figment.find_metadata(Self::PATH.unwrap()).cloned(); + err.profile = Some(figment::Profile::Default); + err.path.insert(0, Self::PATH.unwrap().to_owned()); + err.path.insert(1, format!("{index}")); + err + })?; + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use figment::{ + Figment, Jail, + providers::{Format, Yaml}, + }; + use tokio::{runtime::Handle, task}; + + use super::*; + + #[tokio::test] + async fn load_config() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + r#" + clients: + - client_id: 01GFWR28C4KNE04WG3HKXB7C9R + client_auth_method: none + redirect_uris: + - https://exemple.fr/callback + + - client_id: 01GFWR32NCQ12B8Z0J8CPXRRB6 + client_auth_method: client_secret_basic + client_secret_file: secret + + - client_id: 
01GFWR3WHR93Y5HK389H28VHZ9 + client_auth_method: client_secret_post + client_secret: c1!3n753c237 + + - client_id: 01GFWR43R2ZZ8HX9CVBNW9TJWG + client_auth_method: client_secret_jwt + client_secret_file: secret + + - client_id: 01GFWR4BNFDCC4QDG6AMSP1VRR + client_auth_method: private_key_jwt + jwks: + keys: + - kid: "03e84aed4ef4431014e8617567864c4efaaaede9" + kty: "RSA" + alg: "RS256" + use: "sig" + e: "AQAB" + n: "ma2uRyBeSEOatGuDpCiV9oIxlDWix_KypDYuhQfEzqi_BiF4fV266OWfyjcABbam59aJMNvOnKW3u_eZM-PhMCBij5MZ-vcBJ4GfxDJeKSn-GP_dJ09rpDcILh8HaWAnPmMoi4DC0nrfE241wPISvZaaZnGHkOrfN_EnA5DligLgVUbrA5rJhQ1aSEQO_gf1raEOW3DZ_ACU3qhtgO0ZBG3a5h7BPiRs2sXqb2UCmBBgwyvYLDebnpE7AotF6_xBIlR-Cykdap3GHVMXhrIpvU195HF30ZoBU4dMd-AeG6HgRt4Cqy1moGoDgMQfbmQ48Hlunv9_Vi2e2CLvYECcBw" + + - kid: "d01c1abe249269f72ef7ca2613a86c9f05e59567" + kty: "RSA" + alg: "RS256" + use: "sig" + e: "AQAB" + n: "0hukqytPwrj1RbMYhYoepCi3CN5k7DwYkTe_Cmb7cP9_qv4ok78KdvFXt5AnQxCRwBD7-qTNkkfMWO2RxUMBdQD0ED6tsSb1n5dp0XY8dSWiBDCX8f6Hr-KolOpvMLZKRy01HdAWcM6RoL9ikbjYHUEW1C8IJnw3MzVHkpKFDL354aptdNLaAdTCBvKzU9WpXo10g-5ctzSlWWjQuecLMQ4G1mNdsR1LHhUENEnOvgT8cDkX0fJzLbEbyBYkdMgKggyVPEB1bg6evG4fTKawgnf0IDSPxIU-wdS9wdSP9ZCJJPLi5CEp-6t6rE_sb2dGcnzjCGlembC57VwpkUvyMw" + "#, + )?; + jail.create_file("secret", r"c1!3n753c237")?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("clients")?; + + assert_eq!(config.0.len(), 5); + + assert_eq!( + config.0[0].client_id, + Ulid::from_str("01GFWR28C4KNE04WG3HKXB7C9R").unwrap() + ); + assert_eq!( + config.0[0].redirect_uris, + vec!["https://exemple.fr/callback".parse().unwrap()] + ); + + assert_eq!( + config.0[1].client_id, + Ulid::from_str("01GFWR32NCQ12B8Z0J8CPXRRB6").unwrap() + ); + assert_eq!(config.0[1].redirect_uris, Vec::new()); + + assert!(config.0[0].client_secret.is_none()); + assert!(matches!(config.0[1].client_secret, Some(ClientSecret::File(ref p)) if p == "secret")); + assert!(matches!(config.0[2].client_secret, Some(ClientSecret::Value(ref 
v)) if v == "c1!3n753c237")); + assert!(matches!(config.0[3].client_secret, Some(ClientSecret::File(ref p)) if p == "secret")); + assert!(config.0[4].client_secret.is_none()); + + Handle::current().block_on(async move { + assert_eq!(config.0[1].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + assert_eq!(config.0[2].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + assert_eq!(config.0[3].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + }); + + Ok(()) + }); + }).await.unwrap(); + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/database.rs b/matrix-authentication-service/crates/config/src/sections/database.rs new file mode 100644 index 00000000..4830a401 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/database.rs @@ -0,0 +1,319 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{num::NonZeroU32, time::Duration}; + +use camino::Utf8PathBuf; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; + +use super::ConfigurationSection; +use crate::schema; + +#[allow(clippy::unnecessary_wraps)] +fn default_connection_string() -> Option { + Some("postgresql://".to_owned()) +} + +fn default_max_connections() -> NonZeroU32 { + NonZeroU32::new(10).unwrap() +} + +fn default_connect_timeout() -> Duration { + Duration::from_secs(30) +} + +#[allow(clippy::unnecessary_wraps)] +fn default_idle_timeout() -> Option { + Some(Duration::from_secs(10 * 60)) +} + +#[allow(clippy::unnecessary_wraps)] +fn default_max_lifetime() -> Option { + Some(Duration::from_secs(30 * 60)) +} + +impl Default for DatabaseConfig { + fn default() -> Self { + Self { + uri: default_connection_string(), + host: None, + port: None, + socket: None, + username: None, + password: None, + database: None, + ssl_mode: None, + ssl_ca: None, + ssl_ca_file: None, + ssl_certificate: None, + ssl_certificate_file: None, + ssl_key: None, + ssl_key_file: None, + max_connections: default_max_connections(), + min_connections: Default::default(), + connect_timeout: default_connect_timeout(), + idle_timeout: default_idle_timeout(), + max_lifetime: default_max_lifetime(), + } + } +} + +/// Options for controlling the level of protection provided for PostgreSQL SSL +/// connections. +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "kebab-case")] +pub enum PgSslMode { + /// Only try a non-SSL connection. + Disable, + + /// First try a non-SSL connection; if that fails, try an SSL connection. + Allow, + + /// First try an SSL connection; if that fails, try a non-SSL connection. + Prefer, + + /// Only try an SSL connection. If a root CA file is present, verify the + /// connection in the same way as if `VerifyCa` was specified. 
+ Require, + + /// Only try an SSL connection, and verify that the server certificate is + /// issued by a trusted certificate authority (CA). + VerifyCa, + + /// Only try an SSL connection; verify that the server certificate is issued + /// by a trusted CA and that the requested server host name matches that + /// in the certificate. + VerifyFull, +} + +/// Database connection configuration +#[serde_as] +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct DatabaseConfig { + /// Connection URI + /// + /// This must not be specified if `host`, `port`, `socket`, `username`, + /// `password`, or `database` are specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(url, default = "default_connection_string")] + pub uri: Option, + + /// Name of host to connect to + /// + /// This must not be specified if `uri` is specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option::")] + pub host: Option, + + /// Port number to connect at the server host + /// + /// This must not be specified if `uri` is specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(range(min = 1, max = 65535))] + pub port: Option, + + /// Directory containing the UNIX socket to connect to + /// + /// This must not be specified if `uri` is specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub socket: Option, + + /// PostgreSQL user name to connect as + /// + /// This must not be specified if `uri` is specified. + #[serde(skip_serializing_if = "Option::is_none")] + pub username: Option, + + /// Password to be used if the server demands password authentication + /// + /// This must not be specified if `uri` is specified. + #[serde(skip_serializing_if = "Option::is_none")] + pub password: Option, + + /// The database name + /// + /// This must not be specified if `uri` is specified. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub database: Option, + + /// How to handle SSL connections + #[serde(skip_serializing_if = "Option::is_none")] + pub ssl_mode: Option, + + /// The PEM-encoded root certificate for SSL connections + /// + /// This must not be specified if the `ssl_ca_file` option is specified. + #[serde(skip_serializing_if = "Option::is_none")] + pub ssl_ca: Option, + + /// Path to the root certificate for SSL connections + /// + /// This must not be specified if the `ssl_ca` option is specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub ssl_ca_file: Option, + + /// The PEM-encoded client certificate for SSL connections + /// + /// This must not be specified if the `ssl_certificate_file` option is + /// specified. + #[serde(skip_serializing_if = "Option::is_none")] + pub ssl_certificate: Option, + + /// Path to the client certificate for SSL connections + /// + /// This must not be specified if the `ssl_certificate` option is specified. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub ssl_certificate_file: Option, + + /// The PEM-encoded client key for SSL connections + /// + /// This must not be specified if the `ssl_key_file` option is specified. + #[serde(skip_serializing_if = "Option::is_none")] + pub ssl_key: Option, + + /// Path to the client key for SSL connections + /// + /// This must not be specified if the `ssl_key` option is specified. 
+ #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub ssl_key_file: Option, + + /// Set the maximum number of connections the pool should maintain + #[serde(default = "default_max_connections")] + pub max_connections: NonZeroU32, + + /// Set the minimum number of connections the pool should maintain + #[serde(default)] + pub min_connections: u32, + + /// Set the amount of time to attempt connecting to the database + #[schemars(with = "u64")] + #[serde(default = "default_connect_timeout")] + #[serde_as(as = "serde_with::DurationSeconds")] + pub connect_timeout: Duration, + + /// Set a maximum idle duration for individual connections + #[schemars(with = "Option")] + #[serde( + default = "default_idle_timeout", + skip_serializing_if = "Option::is_none" + )] + #[serde_as(as = "Option>")] + pub idle_timeout: Option, + + /// Set the maximum lifetime of individual connections + #[schemars(with = "u64")] + #[serde( + default = "default_max_lifetime", + skip_serializing_if = "Option::is_none" + )] + #[serde_as(as = "Option>")] + pub max_lifetime: Option, +} + +impl ConfigurationSection for DatabaseConfig { + const PATH: Option<&'static str> = Some("database"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + let metadata = figment.find_metadata(Self::PATH.unwrap()); + let annotate = |mut error: figment::Error| { + error.metadata = metadata.cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![Self::PATH.unwrap().to_owned()]; + error + }; + + // Check that the user did not specify both `uri` and the split options at the + // same time + let has_split_options = self.host.is_some() + || self.port.is_some() + || self.socket.is_some() + || self.username.is_some() + || self.password.is_some() + || self.database.is_some(); + + if self.uri.is_some() && has_split_options { + return Err(annotate(figment::error::Error::from( + "uri must not be specified if host, port, socket, username, 
password, or database are specified".to_owned(), + )).into()); + } + + if self.ssl_ca.is_some() && self.ssl_ca_file.is_some() { + return Err(annotate(figment::error::Error::from( + "ssl_ca must not be specified if ssl_ca_file is specified".to_owned(), + )) + .into()); + } + + if self.ssl_certificate.is_some() && self.ssl_certificate_file.is_some() { + return Err(annotate(figment::error::Error::from( + "ssl_certificate must not be specified if ssl_certificate_file is specified" + .to_owned(), + )) + .into()); + } + + if self.ssl_key.is_some() && self.ssl_key_file.is_some() { + return Err(annotate(figment::error::Error::from( + "ssl_key must not be specified if ssl_key_file is specified".to_owned(), + )) + .into()); + } + + if (self.ssl_key.is_some() || self.ssl_key_file.is_some()) + ^ (self.ssl_certificate.is_some() || self.ssl_certificate_file.is_some()) + { + return Err(annotate(figment::error::Error::from( + "both a ssl_certificate and a ssl_key must be set at the same time or none of them" + .to_owned(), + )) + .into()); + } + + Ok(()) + } +} +#[cfg(test)] +mod tests { + use figment::{ + Figment, Jail, + providers::{Format, Yaml}, + }; + + use super::*; + + #[test] + fn load_config() { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + r" + database: + uri: postgresql://user:password@host/database + ", + )?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("database")?; + + assert_eq!( + config.uri.as_deref(), + Some("postgresql://user:password@host/database") + ); + + Ok(()) + }); + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/email.rs b/matrix-authentication-service/crates/config/src/sections/email.rs new file mode 100644 index 00000000..8c365e3e --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/email.rs @@ -0,0 +1,280 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(deprecated)] + +use std::{num::NonZeroU16, str::FromStr}; + +use lettre::message::Mailbox; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de::Error}; + +use super::ConfigurationSection; + +/// Encryption mode to use +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "lowercase")] +pub enum EmailSmtpMode { + /// Plain text + Plain, + + /// `StartTLS` (starts as plain text then upgrade to TLS) + StartTls, + + /// TLS + Tls, +} + +/// What backend should be used when sending emails +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum EmailTransportKind { + /// Don't send emails anywhere + #[default] + Blackhole, + + /// Send emails via an SMTP relay + Smtp, + + /// Send emails by calling sendmail + Sendmail, +} + +fn default_email() -> String { + r#""Authentication Service" "#.to_owned() +} + +#[allow(clippy::unnecessary_wraps)] +fn default_sendmail_command() -> Option { + Some("sendmail".to_owned()) +} + +/// Configuration related to sending emails +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +pub struct EmailConfig { + /// Email address to use as From when sending emails + #[serde(default = "default_email")] + #[schemars(email)] + pub from: String, + + /// Email address to use as Reply-To when sending emails + #[serde(default = "default_email")] + #[schemars(email)] + pub reply_to: String, + + /// What backend should be used when sending emails + transport: EmailTransportKind, + + /// SMTP transport: Connection mode to the relay + #[serde(skip_serializing_if = "Option::is_none")] + mode: Option, + + /// SMTP transport: Hostname to connect to + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + hostname: Option, + + /// SMTP transport: 
Port to connect to. Default is 25 for plain, 465 for TLS + /// and 587 for `StartTLS` + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(range(min = 1, max = 65535))] + port: Option, + + /// SMTP transport: Username for use to authenticate when connecting to the + /// SMTP server + /// + /// Must be set if the `password` field is set + #[serde(skip_serializing_if = "Option::is_none")] + username: Option, + + /// SMTP transport: Password for use to authenticate when connecting to the + /// SMTP server + /// + /// Must be set if the `username` field is set + #[serde(skip_serializing_if = "Option::is_none")] + password: Option, + + /// Sendmail transport: Command to use to send emails + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(default = "default_sendmail_command")] + command: Option, +} + +impl EmailConfig { + /// What backend should be used when sending emails + #[must_use] + pub fn transport(&self) -> EmailTransportKind { + self.transport + } + + /// Connection mode to the relay + #[must_use] + pub fn mode(&self) -> Option { + self.mode + } + + /// Hostname to connect to + #[must_use] + pub fn hostname(&self) -> Option<&str> { + self.hostname.as_deref() + } + + /// Port to connect to + #[must_use] + pub fn port(&self) -> Option { + self.port + } + + /// Username for use to authenticate when connecting to the SMTP server + #[must_use] + pub fn username(&self) -> Option<&str> { + self.username.as_deref() + } + + /// Password for use to authenticate when connecting to the SMTP server + #[must_use] + pub fn password(&self) -> Option<&str> { + self.password.as_deref() + } + + /// Command to use to send emails + #[must_use] + pub fn command(&self) -> Option<&str> { + self.command.as_deref() + } +} + +impl Default for EmailConfig { + fn default() -> Self { + Self { + from: default_email(), + reply_to: default_email(), + transport: EmailTransportKind::Blackhole, + mode: None, + hostname: None, + port: None, + username: None, + password: 
None, + command: None, + } + } +} + +impl ConfigurationSection for EmailConfig { + const PATH: Option<&'static str> = Some("email"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + let metadata = figment.find_metadata(Self::PATH.unwrap()); + + let error_on_field = |mut error: figment::error::Error, field: &'static str| { + error.metadata = metadata.cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![Self::PATH.unwrap().to_owned(), field.to_owned()]; + error + }; + + let missing_field = |field: &'static str| { + error_on_field(figment::error::Error::missing_field(field), field) + }; + + let unexpected_field = |field: &'static str, expected_fields: &'static [&'static str]| { + error_on_field( + figment::error::Error::unknown_field(field, expected_fields), + field, + ) + }; + + match self.transport { + EmailTransportKind::Blackhole => {} + + EmailTransportKind::Smtp => { + if let Err(e) = Mailbox::from_str(&self.from) { + return Err(error_on_field(figment::error::Error::custom(e), "from").into()); + } + + if let Err(e) = Mailbox::from_str(&self.reply_to) { + return Err(error_on_field(figment::error::Error::custom(e), "reply_to").into()); + } + + match (self.username.is_some(), self.password.is_some()) { + (true, true) | (false, false) => {} + (true, false) => { + return Err(missing_field("password").into()); + } + (false, true) => { + return Err(missing_field("username").into()); + } + } + + if self.mode.is_none() { + return Err(missing_field("mode").into()); + } + + if self.hostname.is_none() { + return Err(missing_field("hostname").into()); + } + + if self.command.is_some() { + return Err(unexpected_field( + "command", + &[ + "from", + "reply_to", + "transport", + "mode", + "hostname", + "port", + "username", + "password", + ], + ) + .into()); + } + } + + EmailTransportKind::Sendmail => { + let expected_fields = &["from", "reply_to", "transport", "command"]; + + if let Err(e) = 
Mailbox::from_str(&self.from) { + return Err(error_on_field(figment::error::Error::custom(e), "from").into()); + } + + if let Err(e) = Mailbox::from_str(&self.reply_to) { + return Err(error_on_field(figment::error::Error::custom(e), "reply_to").into()); + } + + if self.command.is_none() { + return Err(missing_field("command").into()); + } + + if self.mode.is_some() { + return Err(unexpected_field("mode", expected_fields).into()); + } + + if self.hostname.is_some() { + return Err(unexpected_field("hostname", expected_fields).into()); + } + + if self.port.is_some() { + return Err(unexpected_field("port", expected_fields).into()); + } + + if self.username.is_some() { + return Err(unexpected_field("username", expected_fields).into()); + } + + if self.password.is_some() { + return Err(unexpected_field("password", expected_fields).into()); + } + } + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/experimental.rs b/matrix-authentication-service/crates/config/src/sections/experimental.rs new file mode 100644 index 00000000..b8f3920b --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/experimental.rs @@ -0,0 +1,126 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::num::NonZeroU64; + +use chrono::Duration; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; + +use crate::ConfigurationSection; + +fn default_true() -> bool { + true +} + +fn default_token_ttl() -> Duration { + Duration::microseconds(5 * 60 * 1000 * 1000) +} + +fn is_default_token_ttl(value: &Duration) -> bool { + *value == default_token_ttl() +} + +/// Configuration options for the inactive session expiration feature +#[serde_as] +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize)] +pub struct InactiveSessionExpirationConfig { + /// Time after which an inactive session is automatically finished + #[schemars(with = "u64", range(min = 600, max = 7_776_000))] + #[serde_as(as = "serde_with::DurationSeconds")] + pub ttl: Duration, + + /// Should compatibility sessions expire after inactivity + #[serde(default = "default_true")] + pub expire_compat_sessions: bool, + + /// Should OAuth 2.0 sessions expire after inactivity + #[serde(default = "default_true")] + pub expire_oauth_sessions: bool, + + /// Should user sessions expire after inactivity + #[serde(default = "default_true")] + pub expire_user_sessions: bool, +} + +/// Configuration sections for experimental options +/// +/// Do not change these options unless you know what you are doing. +#[serde_as] +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize)] +pub struct ExperimentalConfig { + /// Time-to-live of access tokens in seconds. Defaults to 5 minutes. + #[schemars(with = "u64", range(min = 60, max = 86400))] + #[serde( + default = "default_token_ttl", + skip_serializing_if = "is_default_token_ttl" + )] + #[serde_as(as = "serde_with::DurationSeconds")] + pub access_token_ttl: Duration, + + /// Time-to-live of compatibility access tokens in seconds. Defaults to 5 + /// minutes. 
+ #[schemars(with = "u64", range(min = 60, max = 86400))] + #[serde( + default = "default_token_ttl", + skip_serializing_if = "is_default_token_ttl" + )] + #[serde_as(as = "serde_with::DurationSeconds")] + pub compat_token_ttl: Duration, + + /// Experimental feature to automatically expire inactive sessions + /// + /// Disabled by default + #[serde(skip_serializing_if = "Option::is_none")] + pub inactive_session_expiration: Option, + + /// Experimental feature to show a plan management tab and iframe. + /// This value is passed through "as is" to the client without any + /// validation. + #[serde(skip_serializing_if = "Option::is_none")] + pub plan_management_iframe_uri: Option, + + /// Experimental feature to limit the number of application sessions per + /// user. + /// + /// Disabled by default. + #[serde(skip_serializing_if = "Option::is_none")] + pub session_limit: Option, +} + +impl Default for ExperimentalConfig { + fn default() -> Self { + Self { + access_token_ttl: default_token_ttl(), + compat_token_ttl: default_token_ttl(), + inactive_session_expiration: None, + plan_management_iframe_uri: None, + session_limit: None, + } + } +} + +impl ExperimentalConfig { + pub(crate) fn is_default(&self) -> bool { + is_default_token_ttl(&self.access_token_ttl) + && is_default_token_ttl(&self.compat_token_ttl) + && self.inactive_session_expiration.is_none() + && self.plan_management_iframe_uri.is_none() + && self.session_limit.is_none() + } +} + +impl ConfigurationSection for ExperimentalConfig { + const PATH: Option<&'static str> = Some("experimental"); +} + +/// Configuration options for the session limit feature +#[derive(Clone, Debug, Deserialize, JsonSchema, Serialize)] +pub struct SessionLimitConfig { + pub soft_limit: NonZeroU64, + pub hard_limit: NonZeroU64, +} diff --git a/matrix-authentication-service/crates/config/src/sections/http.rs b/matrix-authentication-service/crates/config/src/sections/http.rs new file mode 100644 index 00000000..880e4e06 --- /dev/null
+++ b/matrix-authentication-service/crates/config/src/sections/http.rs @@ -0,0 +1,473 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(deprecated)] + +use std::borrow::Cow; + +use anyhow::bail; +use camino::Utf8PathBuf; +use ipnetwork::IpNetwork; +use mas_keystore::PrivateKey; +use rustls_pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer, pem::PemObject}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use url::Url; + +use super::ConfigurationSection; + +fn default_public_base() -> Url { + "http://[::]:8080".parse().unwrap() +} + +#[cfg(not(any(feature = "docker", feature = "dist")))] +fn http_listener_assets_path_default() -> Utf8PathBuf { + "./frontend/dist/".into() +} + +#[cfg(feature = "docker")] +fn http_listener_assets_path_default() -> Utf8PathBuf { + "/usr/local/share/mas-cli/assets/".into() +} + +#[cfg(feature = "dist")] +fn http_listener_assets_path_default() -> Utf8PathBuf { + "./share/assets/".into() +} + +fn is_default_http_listener_assets_path(value: &Utf8PathBuf) -> bool { + *value == http_listener_assets_path_default() +} + +fn default_trusted_proxies() -> Vec { + vec![ + IpNetwork::new([192, 168, 0, 0].into(), 16).unwrap(), + IpNetwork::new([172, 16, 0, 0].into(), 12).unwrap(), + IpNetwork::new([10, 0, 0, 0].into(), 10).unwrap(), + IpNetwork::new(std::net::Ipv4Addr::LOCALHOST.into(), 8).unwrap(), + IpNetwork::new([0xfd00, 0, 0, 0, 0, 0, 0, 0].into(), 8).unwrap(), + IpNetwork::new(std::net::Ipv6Addr::LOCALHOST.into(), 128).unwrap(), + ] +} + +/// Kind of socket +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "lowercase")] +pub enum UnixOrTcp { + /// UNIX domain socket + Unix, + + /// TCP socket + Tcp, +} + +impl UnixOrTcp { + /// UNIX domain socket + #[must_use] + pub 
const fn unix() -> Self { + Self::Unix + } + + /// TCP socket + #[must_use] + pub const fn tcp() -> Self { + Self::Tcp + } +} + +/// Configuration of a single listener +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone)] +#[serde(untagged)] +pub enum BindConfig { + /// Listen on the specified host and port + Listen { + /// Host on which to listen. + /// + /// Defaults to listening on all addresses + #[serde(skip_serializing_if = "Option::is_none")] + host: Option, + + /// Port on which to listen. + port: u16, + }, + + /// Listen on the specified address + Address { + /// Host and port on which to listen + #[schemars( + example = &"[::1]:8080", + example = &"[::]:8080", + example = &"127.0.0.1:8080", + example = &"0.0.0.0:8080", + )] + address: String, + }, + + /// Listen on a UNIX domain socket + Unix { + /// Path to the socket + #[schemars(with = "String")] + socket: Utf8PathBuf, + }, + + /// Accept connections on file descriptors passed by the parent process. + /// + /// This is useful for grabbing sockets passed by systemd. + /// + /// See + FileDescriptor { + /// Index of the file descriptor. Note that this is offset by 3 + /// because of the standard input/output sockets, so setting + /// here a value of `0` will grab the file descriptor `3` + #[serde(default)] + fd: usize, + + /// Whether the socket is a TCP socket or a UNIX domain socket. Defaults + /// to TCP. + #[serde(default = "UnixOrTcp::tcp")] + kind: UnixOrTcp, + }, +} + +/// Configuration related to TLS on a listener +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone)] +pub struct TlsConfig { + /// PEM-encoded X509 certificate chain + /// + /// Exactly one of `certificate` or `certificate_file` must be set. + #[serde(skip_serializing_if = "Option::is_none")] + pub certificate: Option, + + /// File containing the PEM-encoded X509 certificate chain + /// + /// Exactly one of `certificate` or `certificate_file` must be set.
+ #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub certificate_file: Option, + + /// PEM-encoded private key + /// + /// Exactly one of `key` or `key_file` must be set. + #[serde(skip_serializing_if = "Option::is_none")] + pub key: Option, + + /// File containing a PEM or DER-encoded private key + /// + /// Exactly one of `key` or `key_file` must be set. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub key_file: Option, + + /// Password used to decode the private key + /// + /// One of `password` or `password_file` must be set if the key is + /// encrypted. + #[serde(skip_serializing_if = "Option::is_none")] + pub password: Option, + + /// Password file used to decode the private key + /// + /// One of `password` or `password_file` must be set if the key is + /// encrypted. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub password_file: Option, +} + +impl TlsConfig { + /// Load the TLS certificate chain and key file from disk + /// + /// # Errors + /// + /// Returns an error if an error was encountered either while: + /// - reading the certificate, key or password files + /// - decoding the key as PEM or DER + /// - decrypting the key if encrypted + /// - a password was provided but the key was not encrypted + /// - decoding the certificate chain as PEM + /// - the certificate chain is empty + pub fn load( + &self, + ) -> Result<(PrivateKeyDer<'static>, Vec>), anyhow::Error> { + let password = match (&self.password, &self.password_file) { + (None, None) => None, + (Some(_), Some(_)) => { + bail!("Only one of `password` or `password_file` can be set at a time") + } + (Some(password), None) => Some(Cow::Borrowed(password)), + (None, Some(path)) => Some(Cow::Owned(std::fs::read_to_string(path)?)), + }; + + // Read the key either embedded in the config file or on disk + let key = match (&self.key, &self.key_file) { + (None, None) => bail!("Either `key` 
or `key_file` must be set"), + (Some(_), Some(_)) => bail!("Only one of `key` or `key_file` can be set at a time"), + (Some(key), None) => { + // If the key was embedded in the config file, assume it is formatted as PEM + if let Some(password) = password { + PrivateKey::load_encrypted_pem(key, password.as_bytes())? + } else { + PrivateKey::load_pem(key)? + } + } + (None, Some(path)) => { + // When reading from disk, it might be either PEM or DER. `PrivateKey::load*` + // will try both. + let key = std::fs::read(path)?; + if let Some(password) = password { + PrivateKey::load_encrypted(&key, password.as_bytes())? + } else { + PrivateKey::load(&key)? + } + } + }; + + // Re-serialize the key to PKCS#8 DER, so rustls can consume it + let key = key.to_pkcs8_der()?; + let key = PrivatePkcs8KeyDer::from(key.to_vec()).into(); + + let certificate_chain_pem = match (&self.certificate, &self.certificate_file) { + (None, None) => bail!("Either `certificate` or `certificate_file` must be set"), + (Some(_), Some(_)) => { + bail!("Only one of `certificate` or `certificate_file` can be set at a time") + } + (Some(certificate), None) => Cow::Borrowed(certificate), + (None, Some(path)) => Cow::Owned(std::fs::read_to_string(path)?), + }; + + let certificate_chain = CertificateDer::pem_slice_iter(certificate_chain_pem.as_bytes()) + .collect::, _>>()?; + + if certificate_chain.is_empty() { + bail!("TLS certificate chain is empty (or invalid)") + } + + Ok((key, certificate_chain)) + } +} + +/// HTTP resources to mount +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone)] +#[serde(tag = "name", rename_all = "lowercase")] +pub enum Resource { + /// Healthcheck endpoint (/health) + Health, + + /// Prometheus metrics endpoint (/metrics) + Prometheus, + + /// OIDC discovery endpoints + Discovery, + + /// Pages destined to be viewed by humans + Human, + + /// GraphQL endpoint + GraphQL { + /// Enabled the GraphQL playground + #[serde(default, skip_serializing_if = "std::ops::Not::not")] 
+ playground: bool, + + /// Allow access for OAuth 2.0 clients (undocumented) + #[serde(default, skip_serializing_if = "std::ops::Not::not")] + undocumented_oauth2_access: bool, + }, + + /// OAuth-related APIs + OAuth, + + /// Matrix compatibility API + Compat, + + /// Static files + Assets { + /// Path to the directory to serve. + #[serde( + default = "http_listener_assets_path_default", + skip_serializing_if = "is_default_http_listener_assets_path" + )] + #[schemars(with = "String")] + path: Utf8PathBuf, + }, + + /// Admin API, served at `/api/admin/v1` + AdminApi, + + /// Mount a "/connection-info" handler which helps debugging informations on + /// the upstream connection + #[serde(rename = "connection-info")] + ConnectionInfo, +} + +/// Configuration of a listener +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone)] +pub struct ListenerConfig { + /// A unique name for this listener which will be shown in traces and in + /// metrics labels + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + + /// List of resources to mount + pub resources: Vec, + + /// HTTP prefix to mount the resources on + #[serde(skip_serializing_if = "Option::is_none")] + pub prefix: Option, + + /// List of sockets to bind + pub binds: Vec, + + /// Accept `HAProxy`'s Proxy Protocol V1 + #[serde(default)] + pub proxy_protocol: bool, + + /// If set, makes the listener use TLS with the provided certificate and key + #[serde(skip_serializing_if = "Option::is_none")] + pub tls: Option, +} + +/// Configuration related to the web server +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct HttpConfig { + /// List of listeners to run + #[serde(default)] + pub listeners: Vec, + + /// List of trusted reverse proxies that can set the `X-Forwarded-For` + /// header + #[serde(default = "default_trusted_proxies")] + #[schemars(with = "Vec", inner(ip))] + pub trusted_proxies: Vec, + + /// Public URL base from where the authentication service is reachable + pub 
public_base: Url, + + /// OIDC issuer URL. Defaults to `public_base` if not set. + #[serde(skip_serializing_if = "Option::is_none")] + pub issuer: Option, +} + +impl Default for HttpConfig { + fn default() -> Self { + Self { + listeners: vec![ + ListenerConfig { + name: Some("web".to_owned()), + resources: vec![ + Resource::Discovery, + Resource::Human, + Resource::OAuth, + Resource::Compat, + Resource::GraphQL { + playground: false, + undocumented_oauth2_access: false, + }, + Resource::Assets { + path: http_listener_assets_path_default(), + }, + ], + prefix: None, + tls: None, + proxy_protocol: false, + binds: vec![BindConfig::Address { + address: "[::]:8080".into(), + }], + }, + ListenerConfig { + name: Some("internal".to_owned()), + resources: vec![Resource::Health], + prefix: None, + tls: None, + proxy_protocol: false, + binds: vec![BindConfig::Listen { + host: Some("localhost".to_owned()), + port: 8081, + }], + }, + ], + trusted_proxies: default_trusted_proxies(), + issuer: Some(default_public_base()), + public_base: default_public_base(), + } + } +} + +impl ConfigurationSection for HttpConfig { + const PATH: Option<&'static str> = Some("http"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + for (index, listener) in self.listeners.iter().enumerate() { + let annotate = |mut error: figment::Error| { + error.metadata = figment + .find_metadata(&format!("{root}.listeners", root = Self::PATH.unwrap())) + .cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![ + Self::PATH.unwrap().to_owned(), + "listeners".to_owned(), + index.to_string(), + ]; + error + }; + + if listener.resources.is_empty() { + return Err( + annotate(figment::Error::from("listener has no resources".to_owned())).into(), + ); + } + + if listener.binds.is_empty() { + return Err(annotate(figment::Error::from( + "listener does not bind to any address".to_owned(), + )) + .into()); + } + + if let Some(tls_config) = &listener.tls { + if 
tls_config.certificate.is_some() && tls_config.certificate_file.is_some() { + return Err(annotate(figment::Error::from( + "Only one of `certificate` or `certificate_file` can be set at a time" + .to_owned(), + )) + .into()); + } + + if tls_config.certificate.is_none() && tls_config.certificate_file.is_none() { + return Err(annotate(figment::Error::from( + "TLS configuration is missing a certificate".to_owned(), + )) + .into()); + } + + if tls_config.key.is_some() && tls_config.key_file.is_some() { + return Err(annotate(figment::Error::from( + "Only one of `key` or `key_file` can be set at a time".to_owned(), + )) + .into()); + } + + if tls_config.key.is_none() && tls_config.key_file.is_none() { + return Err(annotate(figment::Error::from( + "TLS configuration is missing a private key".to_owned(), + )) + .into()); + } + + if tls_config.password.is_some() && tls_config.password_file.is_some() { + return Err(annotate(figment::Error::from( + "Only one of `password` or `password_file` can be set at a time".to_owned(), + )) + .into()); + } + } + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/matrix.rs b/matrix-authentication-service/crates/config/src/sections/matrix.rs new file mode 100644 index 00000000..1b08c1a0 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/matrix.rs @@ -0,0 +1,235 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use anyhow::bail; +use camino::Utf8PathBuf; +use rand::{ + Rng, + distributions::{Alphanumeric, DistString}, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use url::Url; + +use super::ConfigurationSection; + +fn default_homeserver() -> String { + "localhost:8008".to_owned() +} + +fn default_endpoint() -> Url { + Url::parse("http://localhost:8008/").unwrap() +} + +/// The kind of homeserver it is. +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum HomeserverKind { + /// Homeserver is Synapse, version 1.135.0 or newer + #[default] + Synapse, + + /// Homeserver is Synapse, version 1.135.0 or newer, in read-only mode + /// + /// This is meant for testing rolling out Matrix Authentication Service with + /// no risk of writing data to the homeserver. + SynapseReadOnly, + + /// Homeserver is Synapse, using the legacy API + SynapseLegacy, + + /// Homeserver is Synapse, with the modern API available (>= 1.135.0) + SynapseModern, +} + +/// Shared secret between MAS and the homeserver. +/// +/// It either holds the secret value directly or references a file where the +/// secret is stored. +#[derive(Clone, Debug)] +pub enum Secret { + File(Utf8PathBuf), + Value(String), +} + +/// Secret fields as serialized in JSON. 
+#[derive(JsonSchema, Serialize, Deserialize, Clone, Debug)] +struct SecretRaw { + #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + secret_file: Option, + #[serde(skip_serializing_if = "Option::is_none")] + secret: Option, +} + +impl TryFrom for Secret { + type Error = anyhow::Error; + + fn try_from(value: SecretRaw) -> Result { + match (value.secret, value.secret_file) { + (None, None) => bail!("Missing `secret` or `secret_file`"), + (None, Some(path)) => Ok(Secret::File(path)), + (Some(secret), None) => Ok(Secret::Value(secret)), + (Some(_), Some(_)) => bail!("Cannot specify both `secret` and `secret_file`"), + } + } +} + +impl From for SecretRaw { + fn from(value: Secret) -> Self { + match value { + Secret::File(path) => SecretRaw { + secret_file: Some(path), + secret: None, + }, + Secret::Value(secret) => SecretRaw { + secret_file: None, + secret: Some(secret), + }, + } + } +} + +/// Configuration related to the Matrix homeserver +#[serde_as] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct MatrixConfig { + /// The kind of homeserver it is. + #[serde(default)] + pub kind: HomeserverKind, + + /// The server name of the homeserver. + #[serde(default = "default_homeserver")] + pub homeserver: String, + + /// Shared secret to use for calls to the admin API + #[schemars(with = "SecretRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + pub secret: Secret, + + /// The base URL of the homeserver's client API + #[serde(default = "default_endpoint")] + pub endpoint: Url, +} + +impl ConfigurationSection for MatrixConfig { + const PATH: Option<&'static str> = Some("matrix"); +} + +impl MatrixConfig { + /// Returns the shared secret. + /// + /// If `secret_file` was given, the secret is read from that file. + /// + /// # Errors + /// + /// Returns an error when the shared secret could not be read from file. 
+ pub async fn secret(&self) -> anyhow::Result { + Ok(match &self.secret { + Secret::File(path) => { + let raw = tokio::fs::read_to_string(path).await?; + // Trim the secret when read from file to match Synapse's behaviour + raw.trim().to_string() + } + Secret::Value(secret) => secret.clone(), + }) + } + + pub(crate) fn generate(mut rng: R) -> Self + where + R: Rng + Send, + { + Self { + kind: HomeserverKind::default(), + homeserver: default_homeserver(), + secret: Secret::Value(Alphanumeric.sample_string(&mut rng, 32)), + endpoint: default_endpoint(), + } + } + + pub(crate) fn test() -> Self { + Self { + kind: HomeserverKind::default(), + homeserver: default_homeserver(), + secret: Secret::Value("test".to_owned()), + endpoint: default_endpoint(), + } + } +} + +#[cfg(test)] +mod tests { + use figment::{ + Figment, Jail, + providers::{Format, Yaml}, + }; + use tokio::{runtime::Handle, task}; + + use super::*; + + #[tokio::test] + async fn load_config() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + r" + matrix: + homeserver: matrix.org + secret_file: secret + ", + )?; + jail.create_file("secret", r"m472!x53c237")?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("matrix")?; + + Handle::current().block_on(async move { + assert_eq!(&config.homeserver, "matrix.org"); + assert!(matches!(config.secret, Secret::File(ref p) if p == "secret")); + assert_eq!(config.secret().await.unwrap(), "m472!x53c237"); + }); + + Ok(()) + }); + }) + .await + .unwrap(); + } + + #[tokio::test] + async fn load_config_inline_secrets() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + r" + matrix: + homeserver: matrix.org + secret: m472!x53c237 + ", + )?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("matrix")?; + + Handle::current().block_on(async move { + assert_eq!(&config.homeserver, "matrix.org"); + 
assert!(matches!(config.secret, Secret::Value(ref v) if v == "m472!x53c237")); + assert_eq!(config.secret().await.unwrap(), "m472!x53c237"); + }); + + Ok(()) + }); + }) + .await + .unwrap(); + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/mod.rs b/matrix-authentication-service/crates/config/src/sections/mod.rs new file mode 100644 index 00000000..eb4ff2a4 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/mod.rs @@ -0,0 +1,386 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::bail; +use camino::Utf8PathBuf; +use rand::Rng; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +mod account; +mod branding; +mod captcha; +mod clients; +mod database; +mod email; +mod experimental; +mod http; +mod matrix; +mod passwords; +mod policy; +mod rate_limiting; +mod secrets; +mod telemetry; +mod templates; +mod upstream_oauth2; + +pub use self::{ + account::AccountConfig, + branding::BrandingConfig, + captcha::{CaptchaConfig, CaptchaServiceKind}, + clients::{ClientAuthMethodConfig, ClientConfig, ClientsConfig}, + database::{DatabaseConfig, PgSslMode}, + email::{EmailConfig, EmailSmtpMode, EmailTransportKind}, + experimental::ExperimentalConfig, + http::{ + BindConfig as HttpBindConfig, HttpConfig, ListenerConfig as HttpListenerConfig, + Resource as HttpResource, TlsConfig as HttpTlsConfig, UnixOrTcp, + }, + matrix::{HomeserverKind, MatrixConfig}, + passwords::{ + Algorithm as PasswordAlgorithm, HashingScheme as PasswordHashingScheme, PasswordsConfig, + }, + policy::PolicyConfig, + rate_limiting::RateLimitingConfig, + secrets::SecretsConfig, + telemetry::{ + MetricsConfig, MetricsExporterKind, Propagator, TelemetryConfig, TracingConfig, + TracingExporterKind, + }, + templates::TemplatesConfig, + 
upstream_oauth2::{ + ClaimsImports as UpstreamOAuth2ClaimsImports, DiscoveryMode as UpstreamOAuth2DiscoveryMode, + EmailImportPreference as UpstreamOAuth2EmailImportPreference, + ImportAction as UpstreamOAuth2ImportAction, + OnBackchannelLogout as UpstreamOAuth2OnBackchannelLogout, + OnConflict as UpstreamOAuth2OnConflict, PkceMethod as UpstreamOAuth2PkceMethod, + Provider as UpstreamOAuth2Provider, ResponseMode as UpstreamOAuth2ResponseMode, + TokenAuthMethod as UpstreamOAuth2TokenAuthMethod, UpstreamOAuth2Config, + }, +}; +use crate::util::ConfigurationSection; + +/// Application configuration root +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct RootConfig { + /// List of OAuth 2.0/OIDC clients config + #[serde(default, skip_serializing_if = "ClientsConfig::is_default")] + pub clients: ClientsConfig, + + /// Configuration of the HTTP server + #[serde(default)] + pub http: HttpConfig, + + /// Database connection configuration + #[serde(default)] + pub database: DatabaseConfig, + + /// Configuration related to sending monitoring data + #[serde(default, skip_serializing_if = "TelemetryConfig::is_default")] + pub telemetry: TelemetryConfig, + + /// Configuration related to templates + #[serde(default, skip_serializing_if = "TemplatesConfig::is_default")] + pub templates: TemplatesConfig, + + /// Configuration related to sending emails + #[serde(default)] + pub email: EmailConfig, + + /// Application secrets + pub secrets: SecretsConfig, + + /// Configuration related to user passwords + #[serde(default)] + pub passwords: PasswordsConfig, + + /// Configuration related to the homeserver + pub matrix: MatrixConfig, + + /// Configuration related to the OPA policies + #[serde(default, skip_serializing_if = "PolicyConfig::is_default")] + pub policy: PolicyConfig, + + /// Configuration related to limiting the rate of user actions to prevent + /// abuse + #[serde(default, skip_serializing_if = "RateLimitingConfig::is_default")] + pub rate_limiting: 
RateLimitingConfig, + + /// Configuration related to upstream OAuth providers + #[serde(default, skip_serializing_if = "UpstreamOAuth2Config::is_default")] + pub upstream_oauth2: UpstreamOAuth2Config, + + /// Configuration section for tweaking the branding of the service + #[serde(default, skip_serializing_if = "BrandingConfig::is_default")] + pub branding: BrandingConfig, + + /// Configuration section to setup CAPTCHA protection on a few operations + #[serde(default, skip_serializing_if = "CaptchaConfig::is_default")] + pub captcha: CaptchaConfig, + + /// Configuration section to configure features related to account + /// management + #[serde(default, skip_serializing_if = "AccountConfig::is_default")] + pub account: AccountConfig, + + /// Experimental configuration options + #[serde(default, skip_serializing_if = "ExperimentalConfig::is_default")] + pub experimental: ExperimentalConfig, +} + +impl ConfigurationSection for RootConfig { + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + self.clients.validate(figment)?; + self.http.validate(figment)?; + self.database.validate(figment)?; + self.telemetry.validate(figment)?; + self.templates.validate(figment)?; + self.email.validate(figment)?; + self.passwords.validate(figment)?; + self.secrets.validate(figment)?; + self.matrix.validate(figment)?; + self.policy.validate(figment)?; + self.rate_limiting.validate(figment)?; + self.upstream_oauth2.validate(figment)?; + self.branding.validate(figment)?; + self.captcha.validate(figment)?; + self.account.validate(figment)?; + self.experimental.validate(figment)?; + + Ok(()) + } +} + +impl RootConfig { + /// Generate a new configuration with random secrets + /// + /// # Errors + /// + /// Returns an error if the secrets could not be generated + pub async fn generate(mut rng: R) -> anyhow::Result + where + R: Rng + Send, + { + Ok(Self { + clients: ClientsConfig::default(), + http: HttpConfig::default(), + database: DatabaseConfig::default(), + 
telemetry: TelemetryConfig::default(), + templates: TemplatesConfig::default(), + email: EmailConfig::default(), + passwords: PasswordsConfig::default(), + secrets: SecretsConfig::generate(&mut rng).await?, + matrix: MatrixConfig::generate(&mut rng), + policy: PolicyConfig::default(), + rate_limiting: RateLimitingConfig::default(), + upstream_oauth2: UpstreamOAuth2Config::default(), + branding: BrandingConfig::default(), + captcha: CaptchaConfig::default(), + account: AccountConfig::default(), + experimental: ExperimentalConfig::default(), + }) + } + + /// Configuration used in tests + #[must_use] + pub fn test() -> Self { + Self { + clients: ClientsConfig::default(), + http: HttpConfig::default(), + database: DatabaseConfig::default(), + telemetry: TelemetryConfig::default(), + templates: TemplatesConfig::default(), + passwords: PasswordsConfig::default(), + email: EmailConfig::default(), + secrets: SecretsConfig::test(), + matrix: MatrixConfig::test(), + policy: PolicyConfig::default(), + rate_limiting: RateLimitingConfig::default(), + upstream_oauth2: UpstreamOAuth2Config::default(), + branding: BrandingConfig::default(), + captcha: CaptchaConfig::default(), + account: AccountConfig::default(), + experimental: ExperimentalConfig::default(), + } + } +} + +/// Partial configuration actually used by the server +#[allow(missing_docs)] +#[derive(Debug, Deserialize)] +pub struct AppConfig { + #[serde(default)] + pub http: HttpConfig, + + #[serde(default)] + pub database: DatabaseConfig, + + #[serde(default)] + pub templates: TemplatesConfig, + + #[serde(default)] + pub email: EmailConfig, + + pub secrets: SecretsConfig, + + #[serde(default)] + pub passwords: PasswordsConfig, + + pub matrix: MatrixConfig, + + #[serde(default)] + pub policy: PolicyConfig, + + #[serde(default)] + pub rate_limiting: RateLimitingConfig, + + #[serde(default)] + pub branding: BrandingConfig, + + #[serde(default)] + pub captcha: CaptchaConfig, + + #[serde(default)] + pub account: 
AccountConfig, + + #[serde(default)] + pub experimental: ExperimentalConfig, +} + +impl ConfigurationSection for AppConfig { + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + self.http.validate(figment)?; + self.database.validate(figment)?; + self.templates.validate(figment)?; + self.email.validate(figment)?; + self.passwords.validate(figment)?; + self.secrets.validate(figment)?; + self.matrix.validate(figment)?; + self.policy.validate(figment)?; + self.rate_limiting.validate(figment)?; + self.branding.validate(figment)?; + self.captcha.validate(figment)?; + self.account.validate(figment)?; + self.experimental.validate(figment)?; + + Ok(()) + } +} + +/// Partial config used by the `mas-cli config sync` command +#[allow(missing_docs)] +#[derive(Debug, Deserialize)] +pub struct SyncConfig { + #[serde(default)] + pub database: DatabaseConfig, + + pub secrets: SecretsConfig, + + #[serde(default)] + pub clients: ClientsConfig, + + #[serde(default)] + pub upstream_oauth2: UpstreamOAuth2Config, +} + +impl ConfigurationSection for SyncConfig { + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + self.database.validate(figment)?; + self.secrets.validate(figment)?; + self.clients.validate(figment)?; + self.upstream_oauth2.validate(figment)?; + + Ok(()) + } +} + +/// Client secret config option. +/// +/// It either holds the client secret value directly or references a file where +/// the client secret is stored. +#[derive(Clone, Debug)] +pub enum ClientSecret { + /// Path to the file containing the client secret. + File(Utf8PathBuf), + + /// Client secret value. + Value(String), +} + +/// Client secret fields as serialized in JSON. +#[derive(JsonSchema, Serialize, Deserialize, Clone, Debug)] +pub struct ClientSecretRaw { + /// Path to the file containing the client secret. The client secret is used + /// by the `client_secret_basic`, `client_secret_post` and + /// `client_secret_jwt` authentication methods. 
+ #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + client_secret_file: Option, + + /// Alternative to `client_secret_file`: Reads the client secret directly + /// from the config. + #[serde(skip_serializing_if = "Option::is_none")] + client_secret: Option, +} + +impl ClientSecret { + /// Returns the client secret. + /// + /// If `client_secret_file` was given, the secret is read from that file. + /// + /// # Errors + /// + /// Returns an error when the client secret could not be read from file. + pub async fn value(&self) -> anyhow::Result { + Ok(match self { + ClientSecret::File(path) => tokio::fs::read_to_string(path).await?, + ClientSecret::Value(client_secret) => client_secret.clone(), + }) + } +} + +impl TryFrom for Option { + type Error = anyhow::Error; + + fn try_from(value: ClientSecretRaw) -> Result { + match (value.client_secret, value.client_secret_file) { + (None, None) => Ok(None), + (None, Some(path)) => Ok(Some(ClientSecret::File(path))), + (Some(client_secret), None) => Ok(Some(ClientSecret::Value(client_secret))), + (Some(_), Some(_)) => { + bail!("Cannot specify both `client_secret` and `client_secret_file`") + } + } + } +} + +impl From> for ClientSecretRaw { + fn from(value: Option) -> Self { + match value { + Some(ClientSecret::File(path)) => ClientSecretRaw { + client_secret_file: Some(path), + client_secret: None, + }, + Some(ClientSecret::Value(client_secret)) => ClientSecretRaw { + client_secret_file: None, + client_secret: Some(client_secret), + }, + None => ClientSecretRaw { + client_secret_file: None, + client_secret: None, + }, + } + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/passwords.rs b/matrix-authentication-service/crates/config/src/sections/passwords.rs new file mode 100644 index 00000000..b15c009f --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/passwords.rs @@ -0,0 +1,230 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::cmp::Reverse; + +use anyhow::bail; +use camino::Utf8PathBuf; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use crate::ConfigurationSection; + +fn default_schemes() -> Vec { + vec![HashingScheme { + version: 1, + algorithm: Algorithm::default(), + cost: None, + secret: None, + secret_file: None, + unicode_normalization: false, + }] +} + +fn default_enabled() -> bool { + true +} + +fn default_minimum_complexity() -> u8 { + 3 +} + +/// User password hashing config +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct PasswordsConfig { + /// Whether password-based authentication is enabled + #[serde(default = "default_enabled")] + pub enabled: bool, + + /// The hashing schemes to use for hashing and validating passwords + /// + /// The hashing scheme with the highest version number will be used for + /// hashing new passwords. + #[serde(default = "default_schemes")] + pub schemes: Vec, + + /// Score between 0 and 4 determining the minimum allowed password + /// complexity. Scores are based on the ESTIMATED number of guesses + /// needed to guess the password. 
+ /// + /// - 0: less than 10^2 (100) + /// - 1: less than 10^4 (10'000) + /// - 2: less than 10^6 (1'000'000) + /// - 3: less than 10^8 (100'000'000) + /// - 4: any more than that + #[serde(default = "default_minimum_complexity")] + minimum_complexity: u8, +} + +impl Default for PasswordsConfig { + fn default() -> Self { + Self { + enabled: default_enabled(), + schemes: default_schemes(), + minimum_complexity: default_minimum_complexity(), + } + } +} + +impl ConfigurationSection for PasswordsConfig { + const PATH: Option<&'static str> = Some("passwords"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + let annotate = |mut error: figment::Error| { + error.metadata = figment.find_metadata(Self::PATH.unwrap()).cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![Self::PATH.unwrap().to_owned()]; + error + }; + + if !self.enabled { + // Skip validation if password-based authentication is disabled + return Ok(()); + } + + if self.schemes.is_empty() { + return Err(annotate(figment::Error::from( + "Requires at least one password scheme in the config".to_owned(), + )) + .into()); + } + + for scheme in &self.schemes { + if scheme.secret.is_some() && scheme.secret_file.is_some() { + return Err(annotate(figment::Error::from( + "Cannot specify both `secret` and `secret_file`".to_owned(), + )) + .into()); + } + } + + Ok(()) + } +} + +impl PasswordsConfig { + /// Whether password-based authentication is enabled + #[must_use] + pub fn enabled(&self) -> bool { + self.enabled + } + + /// Minimum complexity of passwords, from 0 to 4, according to the zxcvbn + /// scorer. + #[must_use] + pub fn minimum_complexity(&self) -> u8 { + self.minimum_complexity + } + + /// Load the password hashing schemes defined by the config + /// + /// # Errors + /// + /// Returns an error if the config is invalid, or if the secret file could + /// not be read. 
+ pub async fn load(
+ &self,
+ ) -> Result, Option>, bool)>, anyhow::Error> {
+ let mut schemes: Vec<&HashingScheme> = self.schemes.iter().collect();
+ schemes.sort_unstable_by_key(|a| Reverse(a.version));
+ schemes.dedup_by_key(|a| a.version);
+
+ if schemes.len() != self.schemes.len() {
+ // Some schemes had duplicated versions
+ bail!("Multiple password schemes have the same versions");
+ }
+
+ if schemes.is_empty() {
+ bail!("Requires at least one password scheme in the config");
+ }
+
+ let mut mapped_result = Vec::with_capacity(schemes.len());
+
+ for scheme in schemes {
+ let secret = match (&scheme.secret, &scheme.secret_file) {
+ (Some(secret), None) => Some(secret.clone().into_bytes()),
+ (None, Some(secret_file)) => {
+ let secret = tokio::fs::read(secret_file).await?;
+ Some(secret)
+ }
+ (Some(_), Some(_)) => bail!("Cannot specify both `secret` and `secret_file`"),
+ (None, None) => None,
+ };
+
+ mapped_result.push((
+ scheme.version,
+ scheme.algorithm,
+ scheme.cost,
+ secret,
+ scheme.unicode_normalization,
+ ));
+ }
+
+ Ok(mapped_result)
+ }
+}
+
+#[allow(clippy::trivially_copy_pass_by_ref)]
+const fn is_default_false(value: &bool) -> bool {
+ !*value
+}
+
+/// Parameters for a password hashing scheme
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
+pub struct HashingScheme {
+ /// The version of the hashing scheme. They must be unique, and the highest
+ /// version will be used for hashing new passwords.
+ pub version: u16,
+
+ /// The hashing algorithm to use
+ pub algorithm: Algorithm,
+
+ /// Whether to apply Unicode normalization to the password before hashing
+ ///
+ /// Defaults to `false`, and generally recommended to stay false. It is,
+ /// however, recommended when importing password hashes from Synapse, as it
+ /// applies an NFKC normalization to the password before hashing it.
+ #[serde(default, skip_serializing_if = "is_default_false")] + pub unicode_normalization: bool, + + /// Cost for the bcrypt algorithm + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(default = "default_bcrypt_cost")] + pub cost: Option, + + /// An optional secret to use when hashing passwords. This makes it harder + /// to brute-force the passwords in case of a database leak. + #[serde(skip_serializing_if = "Option::is_none")] + pub secret: Option, + + /// Same as `secret`, but read from a file. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub secret_file: Option, +} + +#[allow(clippy::unnecessary_wraps)] +fn default_bcrypt_cost() -> Option { + Some(12) +} + +/// A hashing algorithm +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)] +#[serde(rename_all = "lowercase")] +pub enum Algorithm { + /// bcrypt + Bcrypt, + + /// argon2id + #[default] + Argon2id, + + /// PBKDF2 + Pbkdf2, +} diff --git a/matrix-authentication-service/crates/config/src/sections/policy.rs b/matrix-authentication-service/crates/config/src/sections/policy.rs new file mode 100644 index 00000000..3b816b71 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/policy.rs @@ -0,0 +1,178 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use camino::Utf8PathBuf; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; + +use super::ConfigurationSection; + +#[cfg(not(any(feature = "docker", feature = "dist")))] +fn default_policy_path() -> Utf8PathBuf { + "./policies/policy.wasm".into() +} + +#[cfg(feature = "docker")] +fn default_policy_path() -> Utf8PathBuf { + "/usr/local/share/mas-cli/policy.wasm".into() +} + +#[cfg(feature = "dist")] +fn default_policy_path() -> Utf8PathBuf { + "./share/policy.wasm".into() +} + +fn is_default_policy_path(value: &Utf8PathBuf) -> bool { + *value == default_policy_path() +} + +fn default_client_registration_entrypoint() -> String { + "client_registration/violation".to_owned() +} + +fn is_default_client_registration_entrypoint(value: &String) -> bool { + *value == default_client_registration_entrypoint() +} + +fn default_register_entrypoint() -> String { + "register/violation".to_owned() +} + +fn is_default_register_entrypoint(value: &String) -> bool { + *value == default_register_entrypoint() +} + +fn default_authorization_grant_entrypoint() -> String { + "authorization_grant/violation".to_owned() +} + +fn is_default_authorization_grant_entrypoint(value: &String) -> bool { + *value == default_authorization_grant_entrypoint() +} + +fn default_password_entrypoint() -> String { + "password/violation".to_owned() +} + +fn is_default_password_entrypoint(value: &String) -> bool { + *value == default_password_entrypoint() +} + +fn default_compat_login_entrypoint() -> String { + "compat_login/violation".to_owned() +} + +fn is_default_compat_login_entrypoint(value: &String) -> bool { + *value == default_compat_login_entrypoint() +} + +fn default_email_entrypoint() -> String { + "email/violation".to_owned() +} + +fn is_default_email_entrypoint(value: &String) -> bool { + *value == default_email_entrypoint() +} + +fn default_data() -> serde_json::Value { + serde_json::json!({}) +} + +fn is_default_data(value: &serde_json::Value) -> bool { 
+ *value == default_data()
+}
+
+/// Policy engine configuration
+#[serde_as]
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
+pub struct PolicyConfig {
+ /// Path to the WASM module
+ #[serde(
+ default = "default_policy_path",
+ skip_serializing_if = "is_default_policy_path"
+ )]
+ #[schemars(with = "String")]
+ pub wasm_module: Utf8PathBuf,
+
+ /// Entrypoint to use when evaluating client registrations
+ #[serde(
+ default = "default_client_registration_entrypoint",
+ skip_serializing_if = "is_default_client_registration_entrypoint"
+ )]
+ pub client_registration_entrypoint: String,
+
+ /// Entrypoint to use when evaluating user registrations
+ #[serde(
+ default = "default_register_entrypoint",
+ skip_serializing_if = "is_default_register_entrypoint"
+ )]
+ pub register_entrypoint: String,
+
+ /// Entrypoint to use when evaluating authorization grants
+ #[serde(
+ default = "default_authorization_grant_entrypoint",
+ skip_serializing_if = "is_default_authorization_grant_entrypoint"
+ )]
+ pub authorization_grant_entrypoint: String,
+
+ /// Entrypoint to use when evaluating compatibility logins
+ #[serde(
+ default = "default_compat_login_entrypoint",
+ skip_serializing_if = "is_default_compat_login_entrypoint"
+ )]
+ pub compat_login_entrypoint: String,
+
+ /// Entrypoint to use when changing password
+ #[serde(
+ default = "default_password_entrypoint",
+ skip_serializing_if = "is_default_password_entrypoint"
+ )]
+ pub password_entrypoint: String,
+
+ /// Entrypoint to use when adding an email address
+ #[serde(
+ default = "default_email_entrypoint",
+ skip_serializing_if = "is_default_email_entrypoint"
+ )]
+ pub email_entrypoint: String,
+
+ /// Arbitrary data to pass to the policy
+ #[serde(default = "default_data", skip_serializing_if = "is_default_data")]
+ pub data: serde_json::Value,
+}
+
+impl Default for PolicyConfig {
+ fn default() -> Self {
+ Self {
+ wasm_module: default_policy_path(),
+ client_registration_entrypoint:
default_client_registration_entrypoint(), + register_entrypoint: default_register_entrypoint(), + authorization_grant_entrypoint: default_authorization_grant_entrypoint(), + compat_login_entrypoint: default_compat_login_entrypoint(), + password_entrypoint: default_password_entrypoint(), + email_entrypoint: default_email_entrypoint(), + data: default_data(), + } + } +} + +impl PolicyConfig { + /// Returns true if the configuration is the default one + pub(crate) fn is_default(&self) -> bool { + is_default_policy_path(&self.wasm_module) + && is_default_client_registration_entrypoint(&self.client_registration_entrypoint) + && is_default_register_entrypoint(&self.register_entrypoint) + && is_default_authorization_grant_entrypoint(&self.authorization_grant_entrypoint) + && is_default_password_entrypoint(&self.password_entrypoint) + && is_default_email_entrypoint(&self.email_entrypoint) + && is_default_data(&self.data) + } +} + +impl ConfigurationSection for PolicyConfig { + const PATH: Option<&'static str> = Some("policy"); +} diff --git a/matrix-authentication-service/crates/config/src/sections/rate_limiting.rs b/matrix-authentication-service/crates/config/src/sections/rate_limiting.rs new file mode 100644 index 00000000..0b7c95db --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/rate_limiting.rs @@ -0,0 +1,298 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+
+use std::{num::NonZeroU32, time::Duration};
+
+use governor::Quota;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize, de::Error as _};
+
+use crate::ConfigurationSection;
+
+/// Configuration related to rate limiting
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
+pub struct RateLimitingConfig {
+ /// Account Recovery-specific rate limits
+ #[serde(default)]
+ pub account_recovery: AccountRecoveryRateLimitingConfig,
+
+ /// Login-specific rate limits
+ #[serde(default)]
+ pub login: LoginRateLimitingConfig,
+
+ /// Controls how many registration attempts are permitted
+ /// based on source address.
+ #[serde(default = "default_registration")]
+ pub registration: RateLimiterConfiguration,
+
+ /// Email authentication-specific rate limits
+ #[serde(default)]
+ pub email_authentication: EmailauthenticationRateLimitingConfig,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
+pub struct LoginRateLimitingConfig {
+ /// Controls how many login attempts are permitted
+ /// based on source IP address.
+ /// This can protect against brute force login attempts.
+ ///
+ /// Note: this limit also applies to password checks when a user attempts to
+ /// change their own password.
+ #[serde(default = "default_login_per_ip")]
+ pub per_ip: RateLimiterConfiguration,
+
+ /// Controls how many login attempts are permitted
+ /// based on the account that is being attempted to be logged into.
+ /// This can protect against a distributed brute force attack
+ /// but should be set high enough to prevent someone's account being
+ /// casually locked out.
+ ///
+ /// Note: this limit also applies to password checks when a user attempts to
+ /// change their own password.
+ #[serde(default = "default_login_per_account")] + pub per_account: RateLimiterConfiguration, +} + +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct AccountRecoveryRateLimitingConfig { + /// Controls how many account recovery attempts are permitted + /// based on source IP address. + /// This can protect against causing e-mail spam to many targets. + /// + /// Note: this limit also applies to re-sends. + #[serde(default = "default_account_recovery_per_ip")] + pub per_ip: RateLimiterConfiguration, + + /// Controls how many account recovery attempts are permitted + /// based on the e-mail address entered into the recovery form. + /// This can protect against causing e-mail spam to one target. + /// + /// Note: this limit also applies to re-sends. + #[serde(default = "default_account_recovery_per_address")] + pub per_address: RateLimiterConfiguration, +} + +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct EmailauthenticationRateLimitingConfig { + /// Controls how many email authentication attempts are permitted + /// based on the source IP address. + /// This can protect against causing e-mail spam to many targets. + #[serde(default = "default_email_authentication_per_ip")] + pub per_ip: RateLimiterConfiguration, + + /// Controls how many email authentication attempts are permitted + /// based on the e-mail address entered into the authentication form. + /// This can protect against causing e-mail spam to one target. + /// + /// Note: this limit also applies to re-sends. + #[serde(default = "default_email_authentication_per_address")] + pub per_address: RateLimiterConfiguration, + + /// Controls how many authentication emails are permitted to be sent per + /// authentication session. This ensures not too many authentication codes + /// are created for the same authentication session. 
+ #[serde(default = "default_email_authentication_emails_per_session")] + pub emails_per_session: RateLimiterConfiguration, + + /// Controls how many code authentication attempts are permitted per + /// authentication session. This can protect against brute-forcing the + /// code. + #[serde(default = "default_email_authentication_attempt_per_session")] + pub attempt_per_session: RateLimiterConfiguration, +} + +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct RateLimiterConfiguration { + /// A one-off burst of actions that the user can perform + /// in one go without waiting. + pub burst: NonZeroU32, + /// How quickly the allowance replenishes, in number of actions per second. + /// Can be fractional to replenish slower. + pub per_second: f64, +} + +impl ConfigurationSection for RateLimitingConfig { + const PATH: Option<&'static str> = Some("rate_limiting"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + let metadata = figment.find_metadata(Self::PATH.unwrap()); + + let error_on_field = |mut error: figment::error::Error, field: &'static str| { + error.metadata = metadata.cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![Self::PATH.unwrap().to_owned(), field.to_owned()]; + error + }; + + let error_on_nested_field = + |mut error: figment::error::Error, container: &'static str, field: &'static str| { + error.metadata = metadata.cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![ + Self::PATH.unwrap().to_owned(), + container.to_owned(), + field.to_owned(), + ]; + error + }; + + // Check one limiter's configuration for errors + let error_on_limiter = + |limiter: &RateLimiterConfiguration| -> Option { + let recip = limiter.per_second.recip(); + // period must be at least 1 nanosecond according to the governor library + if recip < 1.0e-9 || !recip.is_finite() { + return Some(figment::error::Error::custom( + "`per_second` must be a number 
that is more than zero and less than 1_000_000_000 (1e9)", + )); + } + + None + }; + + if let Some(error) = error_on_limiter(&self.account_recovery.per_ip) { + return Err(error_on_nested_field(error, "account_recovery", "per_ip").into()); + } + if let Some(error) = error_on_limiter(&self.account_recovery.per_address) { + return Err(error_on_nested_field(error, "account_recovery", "per_address").into()); + } + + if let Some(error) = error_on_limiter(&self.registration) { + return Err(error_on_field(error, "registration").into()); + } + + if let Some(error) = error_on_limiter(&self.login.per_ip) { + return Err(error_on_nested_field(error, "login", "per_ip").into()); + } + if let Some(error) = error_on_limiter(&self.login.per_account) { + return Err(error_on_nested_field(error, "login", "per_account").into()); + } + + Ok(()) + } +} + +impl RateLimitingConfig { + pub(crate) fn is_default(config: &RateLimitingConfig) -> bool { + config == &RateLimitingConfig::default() + } +} + +impl RateLimiterConfiguration { + pub fn to_quota(self) -> Option { + let reciprocal = self.per_second.recip(); + if !reciprocal.is_finite() { + return None; + } + Some(Quota::with_period(Duration::from_secs_f64(reciprocal))?.allow_burst(self.burst)) + } +} + +fn default_login_per_ip() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(3).unwrap(), + per_second: 3.0 / 60.0, + } +} + +fn default_login_per_account() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(1800).unwrap(), + per_second: 1800.0 / 3600.0, + } +} + +fn default_registration() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(3).unwrap(), + per_second: 3.0 / 3600.0, + } +} + +fn default_account_recovery_per_ip() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(3).unwrap(), + per_second: 3.0 / 3600.0, + } +} + +fn default_account_recovery_per_address() -> RateLimiterConfiguration { + 
RateLimiterConfiguration { + burst: NonZeroU32::new(3).unwrap(), + per_second: 1.0 / 3600.0, + } +} + +fn default_email_authentication_per_ip() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(5).unwrap(), + per_second: 1.0 / 60.0, + } +} + +fn default_email_authentication_per_address() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(3).unwrap(), + per_second: 1.0 / 3600.0, + } +} + +fn default_email_authentication_emails_per_session() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(2).unwrap(), + per_second: 1.0 / 300.0, + } +} + +fn default_email_authentication_attempt_per_session() -> RateLimiterConfiguration { + RateLimiterConfiguration { + burst: NonZeroU32::new(10).unwrap(), + per_second: 1.0 / 60.0, + } +} + +impl Default for RateLimitingConfig { + fn default() -> Self { + RateLimitingConfig { + login: LoginRateLimitingConfig::default(), + registration: default_registration(), + account_recovery: AccountRecoveryRateLimitingConfig::default(), + email_authentication: EmailauthenticationRateLimitingConfig::default(), + } + } +} + +impl Default for LoginRateLimitingConfig { + fn default() -> Self { + LoginRateLimitingConfig { + per_ip: default_login_per_ip(), + per_account: default_login_per_account(), + } + } +} + +impl Default for AccountRecoveryRateLimitingConfig { + fn default() -> Self { + AccountRecoveryRateLimitingConfig { + per_ip: default_account_recovery_per_ip(), + per_address: default_account_recovery_per_address(), + } + } +} + +impl Default for EmailauthenticationRateLimitingConfig { + fn default() -> Self { + EmailauthenticationRateLimitingConfig { + per_ip: default_email_authentication_per_ip(), + per_address: default_email_authentication_per_address(), + emails_per_session: default_email_authentication_emails_per_session(), + attempt_per_session: default_email_authentication_attempt_per_session(), + } + } +} diff --git 
a/matrix-authentication-service/crates/config/src/sections/secrets.rs b/matrix-authentication-service/crates/config/src/sections/secrets.rs new file mode 100644 index 00000000..98feb672 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/secrets.rs @@ -0,0 +1,709 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::borrow::Cow; + +use anyhow::{Context, bail}; +use camino::Utf8PathBuf; +use futures_util::future::{try_join, try_join_all}; +use mas_jose::jwk::{JsonWebKey, JsonWebKeySet, Thumbprint}; +use mas_keystore::{Encrypter, Keystore, PrivateKey}; +use rand::{Rng, SeedableRng, distributions::Standard, prelude::Distribution as _}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use tokio::task; +use tracing::info; + +use super::ConfigurationSection; + +/// Password config option. +/// +/// It either holds the password value directly or references a file where the +/// password is stored. +#[derive(Clone, Debug)] +pub enum Password { + File(Utf8PathBuf), + Value(String), +} + +/// Password fields as serialized in JSON. 
+#[derive(JsonSchema, Serialize, Deserialize, Clone, Debug)] +struct PasswordRaw { + #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + password_file: Option, + #[serde(skip_serializing_if = "Option::is_none")] + password: Option, +} + +impl TryFrom for Option { + type Error = anyhow::Error; + + fn try_from(value: PasswordRaw) -> Result { + match (value.password, value.password_file) { + (None, None) => Ok(None), + (None, Some(path)) => Ok(Some(Password::File(path))), + (Some(password), None) => Ok(Some(Password::Value(password))), + (Some(_), Some(_)) => bail!("Cannot specify both `password` and `password_file`"), + } + } +} + +impl From> for PasswordRaw { + fn from(value: Option) -> Self { + match value { + Some(Password::File(path)) => PasswordRaw { + password_file: Some(path), + password: None, + }, + Some(Password::Value(password)) => PasswordRaw { + password_file: None, + password: Some(password), + }, + None => PasswordRaw { + password_file: None, + password: None, + }, + } + } +} + +/// Key config option. +/// +/// It either holds the key value directly or references a file where the key is +/// stored. +#[derive(Clone, Debug)] +pub enum Key { + File(Utf8PathBuf), + Value(String), +} + +/// Key fields as serialized in JSON. 
+#[derive(JsonSchema, Serialize, Deserialize, Clone, Debug)] +struct KeyRaw { + #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + key_file: Option, + #[serde(skip_serializing_if = "Option::is_none")] + key: Option, +} + +impl TryFrom for Key { + type Error = anyhow::Error; + + fn try_from(value: KeyRaw) -> Result { + match (value.key, value.key_file) { + (None, None) => bail!("Missing `key` or `key_file`"), + (None, Some(path)) => Ok(Key::File(path)), + (Some(key), None) => Ok(Key::Value(key)), + (Some(_), Some(_)) => bail!("Cannot specify both `key` and `key_file`"), + } + } +} + +impl From for KeyRaw { + fn from(value: Key) -> Self { + match value { + Key::File(path) => KeyRaw { + key_file: Some(path), + key: None, + }, + Key::Value(key) => KeyRaw { + key_file: None, + key: Some(key), + }, + } + } +} + +/// A single key with its key ID and optional password. +#[serde_as] +#[derive(JsonSchema, Serialize, Deserialize, Clone, Debug)] +pub struct KeyConfig { + /// The key ID `kid` of the key as used by JWKs. + /// + /// If not given, `kid` will be the key’s RFC 7638 JWK Thumbprint. + #[serde(skip_serializing_if = "Option::is_none")] + kid: Option, + + #[schemars(with = "PasswordRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + password: Option, + + #[schemars(with = "KeyRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + key: Key, +} + +impl KeyConfig { + /// Returns the password in case any is provided. + /// + /// If `password_file` was given, the password is read from that file. + async fn password(&self) -> anyhow::Result>> { + Ok(match &self.password { + Some(Password::File(path)) => Some(Cow::Owned(tokio::fs::read(path).await?)), + Some(Password::Value(password)) => Some(Cow::Borrowed(password.as_bytes())), + None => None, + }) + } + + /// Returns the key. + /// + /// If `key_file` was given, the key is read from that file. 
+ async fn key(&self) -> anyhow::Result> { + Ok(match &self.key { + Key::File(path) => Cow::Owned(tokio::fs::read(path).await?), + Key::Value(key) => Cow::Borrowed(key.as_bytes()), + }) + } + + /// Returns the JSON Web Key derived from this key config. + /// + /// Password and/or key are read from file if they’re given as path. + async fn json_web_key(&self) -> anyhow::Result> { + let (key, password) = try_join(self.key(), self.password()).await?; + + let private_key = match password { + Some(password) => PrivateKey::load_encrypted(&key, password)?, + None => PrivateKey::load(&key)?, + }; + + let kid = match self.kid.clone() { + Some(kid) => kid, + None => private_key.thumbprint_sha256_base64(), + }; + + Ok(JsonWebKey::new(private_key) + .with_kid(kid) + .with_use(mas_iana::jose::JsonWebKeyUse::Sig)) + } +} + +/// Encryption config option. +#[derive(Debug, Clone)] +pub enum Encryption { + File(Utf8PathBuf), + Value([u8; 32]), +} + +/// Encryption fields as serialized in JSON. +#[serde_as] +#[derive(JsonSchema, Serialize, Deserialize, Debug, Clone)] +struct EncryptionRaw { + /// File containing the encryption key for secure cookies. + #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + encryption_file: Option, + + /// Encryption key for secure cookies. 
+ #[schemars( + with = "Option", + regex(pattern = r"[0-9a-fA-F]{64}"), + example = &"0000111122223333444455556666777788889999aaaabbbbccccddddeeeeffff" + )] + #[serde_as(as = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + encryption: Option<[u8; 32]>, +} + +impl TryFrom for Encryption { + type Error = anyhow::Error; + + fn try_from(value: EncryptionRaw) -> Result { + match (value.encryption, value.encryption_file) { + (None, None) => bail!("Missing `encryption` or `encryption_file`"), + (None, Some(path)) => Ok(Encryption::File(path)), + (Some(encryption), None) => Ok(Encryption::Value(encryption)), + (Some(_), Some(_)) => bail!("Cannot specify both `encryption` and `encryption_file`"), + } + } +} + +impl From for EncryptionRaw { + fn from(value: Encryption) -> Self { + match value { + Encryption::File(path) => EncryptionRaw { + encryption_file: Some(path), + encryption: None, + }, + Encryption::Value(encryption) => EncryptionRaw { + encryption_file: None, + encryption: Some(encryption), + }, + } + } +} + +/// Reads all keys from the given directory. +async fn key_configs_from_path(path: &Utf8PathBuf) -> anyhow::Result> { + let mut result = vec![]; + let mut read_dir = tokio::fs::read_dir(path).await?; + while let Some(dir_entry) = read_dir.next_entry().await? { + if !dir_entry.path().is_file() { + continue; + } + result.push(KeyConfig { + kid: None, + password: None, + key: Key::File(dir_entry.path().try_into()?), + }); + } + Ok(result) +} + +/// Application secrets +#[serde_as] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct SecretsConfig { + /// Encryption key for secure cookies + #[schemars(with = "EncryptionRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + encryption: Encryption, + + /// List of private keys to use for signing and encrypting payloads. 
+ #[serde(skip_serializing_if = "Option::is_none")] + keys: Option>, + + /// Directory of private keys to use for signing and encrypting payloads. + #[schemars(with = "Option")] + #[serde(skip_serializing_if = "Option::is_none")] + keys_dir: Option, +} + +impl SecretsConfig { + /// Derive a signing and verifying keystore out of the config + /// + /// # Errors + /// + /// Returns an error when a key could not be imported + #[tracing::instrument(name = "secrets.load", skip_all)] + pub async fn key_store(&self) -> anyhow::Result { + let key_configs = self.key_configs().await?; + let web_keys = try_join_all(key_configs.iter().map(KeyConfig::json_web_key)).await?; + + Ok(Keystore::new(JsonWebKeySet::new(web_keys))) + } + + /// Derive an [`Encrypter`] out of the config + /// + /// # Errors + /// + /// Returns an error when the Encryptor can not be created. + pub async fn encrypter(&self) -> anyhow::Result { + Ok(Encrypter::new(&self.encryption().await?)) + } + + /// Returns the encryption secret. + /// + /// # Errors + /// + /// Returns an error when the encryption secret could not be read from file. + pub async fn encryption(&self) -> anyhow::Result<[u8; 32]> { + // Read the encryption secret either embedded in the config file or on disk + match self.encryption { + Encryption::Value(encryption) => Ok(encryption), + Encryption::File(ref path) => { + let mut bytes = [0; 32]; + let content = tokio::fs::read(path).await?; + hex::decode_to_slice(content, &mut bytes).context( + "Content of `encryption_file` must contain hex characters \ + encoding exactly 32 bytes", + )?; + Ok(bytes) + } + } + } + + /// Returns a combined list of key configs given inline and from files. + /// + /// If `keys_dir` was given, the keys are read from file. 
+ async fn key_configs(&self) -> anyhow::Result> { + let mut key_configs = match &self.keys_dir { + Some(keys_dir) => key_configs_from_path(keys_dir).await?, + None => vec![], + }; + + let inline_key_configs = self.keys.as_deref().unwrap_or_default(); + key_configs.extend(inline_key_configs.iter().cloned()); + + Ok(key_configs) + } +} + +impl ConfigurationSection for SecretsConfig { + const PATH: Option<&'static str> = Some("secrets"); +} + +impl SecretsConfig { + #[expect(clippy::similar_names, reason = "Key type names are very similar")] + #[tracing::instrument(skip_all)] + pub(crate) async fn generate(mut rng: R) -> anyhow::Result + where + R: Rng + Send, + { + info!("Generating keys..."); + + let span = tracing::info_span!("rsa"); + let key_rng = rand_chacha::ChaChaRng::from_rng(&mut rng)?; + let rsa_key = task::spawn_blocking(move || { + let _entered = span.enter(); + let ret = PrivateKey::generate_rsa(key_rng).unwrap(); + info!("Done generating RSA key"); + ret + }) + .await + .context("could not join blocking task")?; + let rsa_key = KeyConfig { + kid: None, + password: None, + key: Key::Value(rsa_key.to_pem(pem_rfc7468::LineEnding::LF)?.to_string()), + }; + + let span = tracing::info_span!("ec_p256"); + let key_rng = rand_chacha::ChaChaRng::from_rng(&mut rng)?; + let ec_p256_key = task::spawn_blocking(move || { + let _entered = span.enter(); + let ret = PrivateKey::generate_ec_p256(key_rng); + info!("Done generating EC P-256 key"); + ret + }) + .await + .context("could not join blocking task")?; + let ec_p256_key = KeyConfig { + kid: None, + password: None, + key: Key::Value(ec_p256_key.to_pem(pem_rfc7468::LineEnding::LF)?.to_string()), + }; + + let span = tracing::info_span!("ec_p384"); + let key_rng = rand_chacha::ChaChaRng::from_rng(&mut rng)?; + let ec_p384_key = task::spawn_blocking(move || { + let _entered = span.enter(); + let ret = PrivateKey::generate_ec_p384(key_rng); + info!("Done generating EC P-384 key"); + ret + }) + .await + .context("could 
not join blocking task")?; + let ec_p384_key = KeyConfig { + kid: None, + password: None, + key: Key::Value(ec_p384_key.to_pem(pem_rfc7468::LineEnding::LF)?.to_string()), + }; + + let span = tracing::info_span!("ec_k256"); + let key_rng = rand_chacha::ChaChaRng::from_rng(&mut rng)?; + let ec_k256_key = task::spawn_blocking(move || { + let _entered = span.enter(); + let ret = PrivateKey::generate_ec_k256(key_rng); + info!("Done generating EC secp256k1 key"); + ret + }) + .await + .context("could not join blocking task")?; + let ec_k256_key = KeyConfig { + kid: None, + password: None, + key: Key::Value(ec_k256_key.to_pem(pem_rfc7468::LineEnding::LF)?.to_string()), + }; + + Ok(Self { + encryption: Encryption::Value(Standard.sample(&mut rng)), + keys: Some(vec![rsa_key, ec_p256_key, ec_p384_key, ec_k256_key]), + keys_dir: None, + }) + } + + pub(crate) fn test() -> Self { + let rsa_key = KeyConfig { + kid: None, + password: None, + key: Key::Value( + indoc::indoc! {r" + -----BEGIN PRIVATE KEY----- + MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAymS2RkeIZo7pUeEN + QUGCG4GLJru5jzxomO9jiNr5D/oRcerhpQVc9aCpBfAAg4l4a1SmYdBzWqX0X5pU + scgTtQIDAQABAkEArNIMlrxUK4bSklkCcXtXdtdKE9vuWfGyOw0GyAB69fkEUBxh + 3j65u+u3ZmW+bpMWHgp1FtdobE9nGwb2VBTWAQIhAOyU1jiUEkrwKK004+6b5QRE + vC9UI2vDWy5vioMNx5Y1AiEA2wGAJ6ETF8FF2Vd+kZlkKK7J0em9cl0gbJDsWIEw + N4ECIEyWYkMurD1WQdTQqnk0Po+DMOihdFYOiBYgRdbnPxWBAiEAmtd0xJAd7622 + tPQniMnrBtiN2NxqFXHCev/8Gpc8gAECIBcaPcF59qVeRmYrfqzKBxFm7LmTwlAl + Gh7BNzCeN+D6 + -----END PRIVATE KEY----- + "} + .to_owned(), + ), + }; + let ecdsa_key = KeyConfig { + kid: None, + password: None, + key: Key::Value( + indoc::indoc! 
{r" + -----BEGIN PRIVATE KEY----- + MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgqfn5mYO/5Qq/wOOiWgHA + NaiDiepgUJ2GI5eq2V8D8nahRANCAARMK9aKUd/H28qaU+0qvS6bSJItzAge1VHn + OhBAAUVci1RpmUA+KdCL5sw9nadAEiONeiGr+28RYHZmlB9qXnjC + -----END PRIVATE KEY----- + "} + .to_owned(), + ), + }; + + Self { + encryption: Encryption::Value([0xEA; 32]), + keys: Some(vec![rsa_key, ecdsa_key]), + keys_dir: None, + } + } +} + +#[cfg(test)] +mod tests { + use figment::{ + Figment, Jail, + providers::{Format, Yaml}, + }; + use mas_jose::constraints::Constrainable; + use tokio::{runtime::Handle, task}; + + use super::*; + + #[tokio::test] + async fn load_config() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + indoc::indoc! {r" + secrets: + encryption_file: encryption + keys_dir: keys + "}, + )?; + jail.create_file( + "encryption", + "0000111122223333444455556666777788889999aaaabbbbccccddddeeeeffff", + )?; + jail.create_dir("keys")?; + jail.create_file( + "keys/key1", + indoc::indoc! 
{r" + -----BEGIN RSA PRIVATE KEY----- + MIIJKQIBAAKCAgEA6oR6LXzJOziUxcRryonLTM5Xkfr9cYPCKvnwsWoAHfd2MC6Q + OCAWSQnNcNz5RTeQUcLEaA8sxQi64zpCwO9iH8y8COCaO8u9qGkOOuJwWnmPfeLs + cEwALEp0LZ67eSUPsMaz533bs4C8p+2UPMd+v7Td8TkkYoqgUrfYuT0bDTMYVsSe + wcNB5qsI7hDLf1t5FX6KU79/Asn1K3UYHTdN83mghOlM4zh1l1CJdtgaE1jAg4Ml + 1X8yG+cT+Ks8gCSGQfIAlVFV4fvvzmpokNKfwAI/b3LS2/ft4ZrK+RCTsWsjUu38 + Zr8jbQMtDznzBHMw1LoaHpwRNjbJZ7uA6x5ikbwz5NAlfCITTta6xYn8qvaBfiYJ + YyUFl0kIHm9Kh9V9p54WPMCFCcQx12deovKV82S6zxTeMflDdosJDB/uG9dT2qPt + wkpTD6xAOx5h59IhfiY0j4ScTl725GygVzyK378soP3LQ/vBixQLpheALViotodH + fJknsrelaISNkrnapZL3QE5C1SUoaUtMG9ovRz5HDpMx5ooElEklq7shFWDhZXbp + 2ndU5RPRCZO3Szop/Xhn2mNWQoEontFh79WIf+wS8TkJIRXhjtYBt3+s96z0iqSg + gDmE8BcP4lP1+TAUY1d7+QEhGCsTJa9TYtfDtNNfuYI9e3mq6LEpHYKWOvECAwEA + AQKCAgAlF60HaCGf50lzT6eePQCAdnEtWrMeyDCRgZTLStvCjEhk7d3LssTeP9mp + oe8fPomUv6c3BOds2/5LQFockABHd/y/CV9RA973NclAEQlPlhiBrb793Vd4VJJe + 6331dveDW0+ggVdFjfVzjhqQfnE9ZcsQ2JvjpiTI0Iv2cy7F01tke0GCSMgx8W1p + J2jjDOxwNOKGGoIT8S4roHVJnFy3nM4sbNtyDj+zHimP4uBE8m2zSgQAP60E8sia + 3+Ki1flnkXJRgQWCHR9cg5dkXfFRz56JmcdgxAHGWX2vD9XRuFi5nitPc6iTw8PV + u7GvS3+MC0oO+1pRkTAhOGv3RDK3Uqmy2zrMUuWkEsz6TVId6gPl7+biRJcP+aER + plJkeC9J9nSizbQPwErGByzoHGLjADgBs9hwqYkPcN38b6jR5S/VDQ+RncCyI87h + s/0pIs/fNlfw4LtpBrolP6g++vo6KUufmE3kRNN9dN4lNOoKjUGkcmX6MGnwxiw6 + NN/uEqf9+CKQele1XeUhRPNJc9Gv+3Ly5y/wEi6FjfVQmCK4hNrl3tvuZw+qkGbq + Au9Jhk7wV81An7fbhBRIXrwOY9AbOKNqUfY+wpKi5vyJFS1yzkFaYSTKTBspkuHW + pWbohO+KreREwaR5HOMK8tQMTLEAeE3taXGsQMJSJ15lRrLc7QKCAQEA68TV/R8O + C4p+vnGJyhcfDJt6+KBKWlroBy75BG7Dg7/rUXaj+MXcqHi+whRNXMqZchSwzUfS + B2WK/HrOBye8JLKDeA3B5TumJaF19vV7EY/nBF2QdRmI1r33Cp+RWUvAcjKa/v2u + KksV3btnJKXCu/stdAyTK7nU0on4qBzm5WZxuIJv6VMHLDNPFdCk+4gM8LuJ3ITU + l7XuZd4gXccPNj0VTeOYiMjIwxtNmE9RpCkTLm92Z7MI+htciGk1xvV0N4m1BXwA + 7qhl1nBgVuJyux4dEYFIeQNhLpHozkEz913QK2gDAHL9pAeiUYJntq4p8HNvfHiQ + vE3wTzil3aUFnwKCAQEA/qQm1Nx5By6an5UunrOvltbTMjsZSDnWspSQbX//j6mL + 2atQLe3y/Nr7E5SGZ1kFD9tgAHTuTGVqjvTqp5dBPw4uo146K2RJwuvaYUzNK26c + 
VoGfMfsI+/bfMfjFnEmGRARZdMr8cvhU+2m04hglsSnNGxsvvPdsiIbRaVDx+JvN + C5C281WlN0WeVd7zNTZkdyUARNXfCxBHQPuYkP5Mz2roZeYlJMWU04i8Cx0/SEuu + bhZQDaNTccSdPDFYcyDDlpqp+mN+U7m+yUPOkVpaxQiSYJZ+NOQsNcAVYfjzyY0E + /VP3s2GddjCJs0amf9SeW0LiMAHPgTp8vbMSRPVVbwKCAQEAmZsSd+llsys2TEmY + pivONN6PjbCRALE9foCiCLtJcmr1m4uaZRg0HScd0UB87rmoo2TLk9L5CYyksr4n + wQ2oTJhpgywjaYAlTVsWiiGBXv3MW1HCLijGuHHno+o2PmFWLpC93ufUMwXcZywT + lRLR/rs07+jJcbGO8OSnNpAt9sN5z+Zblz5a6/c5zVK0SpRnKehld2CrSXRkr8W6 + fJ6WUJYXbTmdRXDbLBJ7yYHUBQolzxkboZBJhvmQnec9/DQq1YxIfhw+Vz8rqjxo + 5/J9IWALPD5owz7qb/bsIITmoIFkgQMxAXfpvJaksEov3Bs4g8oRlpzOX4C/0j1s + Ay3irQKCAQEAwRJ/qufcEFkCvjsj1QsS+MC785shyUSpiE/izlO91xTLx+f/7EM9 + +QCkXK1B1zyE/Qft24rNYDmJOQl0nkuuGfxL2mzImDv7PYMM2reb3PGKMoEnzoKz + xi/h/YbNdnm9BvdxSH/cN+QYs2Pr1X5Pneu+622KnbHQphfq0fqg7Upchwdb4Faw + 5Z6wthVMvK0YMcppUMgEzOOz0w6xGEbowGAkA5cj1KTG+jjzs02ivNM9V5Utb5nF + 3D4iphAYK3rNMfTlKsejciIlCX+TMVyb9EdSjU+uM7ZJ2xtgWx+i4NA+10GCT42V + EZct4TORbN0ukK2+yH2m8yoAiOks0gJemwKCAQAMGROGt8O4HfhpUdOq01J2qvQL + m5oUXX8w1I95XcoAwCqb+dIan8UbCyl/79lbqNpQlHbRy3wlXzWwH9aHKsfPlCvk + 5dE1qrdMdQhLXwP109bRmTiScuU4zfFgHw3XgQhMFXxNp9pze197amLws0TyuBW3 + fupS4kM5u6HKCeBYcw2WP5ukxf8jtn29tohLBiA2A7NYtml9xTer6BBP0DTh+QUn + IJL6jSpuCNxBPKIK7p6tZZ0nMBEdAWMxglYm0bmHpTSd3pgu3ltCkYtDlDcTIaF0 + Q4k44lxUTZQYwtKUVQXBe4ZvaT/jIEMS7K5bsAy7URv/toaTaiEh1hguwSmf + -----END RSA PRIVATE KEY----- + "}, + )?; + jail.create_file( + "keys/key2", + indoc::indoc! 
{r" + -----BEGIN EC PRIVATE KEY----- + MHcCAQEEIKlZz/GnH0idVH1PnAF4HQNwRafgBaE2tmyN1wjfdOQqoAoGCCqGSM49 + AwEHoUQDQgAEHrgPeG+Mt8eahih1h4qaPjhl7jT25cdzBkg3dbVks6gBR2Rx4ug9 + h27LAir5RqxByHvua2XsP46rSTChof78uw== + -----END EC PRIVATE KEY----- + "}, + )?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("secrets")?; + + Handle::current().block_on(async move { + assert!( + matches!(config.encryption, Encryption::File(ref p) if p == "encryption") + ); + assert_eq!( + config.encryption().await.unwrap(), + [ + 0, 0, 17, 17, 34, 34, 51, 51, 68, 68, 85, 85, 102, 102, 119, 119, 136, + 136, 153, 153, 170, 170, 187, 187, 204, 204, 221, 221, 238, 238, 255, + 255 + ] + ); + + let mut key_config = config.key_configs().await.unwrap(); + key_config.sort_by_key(|a| { + if let Key::File(p) = &a.key { + Some(p.clone()) + } else { + None + } + }); + let key_store = config.key_store().await.unwrap(); + + assert!(key_config[0].kid.is_none()); + assert!(matches!(&key_config[0].key, Key::File(p) if p == "keys/key1")); + assert!(key_store.iter().any(|k| k.kid() == Some("xmgGCzGtQFmhEOP0YAqBt-oZyVauSVMXcf4kwcgGZLc"))); + assert!(key_config[1].kid.is_none()); + assert!(matches!(&key_config[1].key, Key::File(p) if p == "keys/key2")); + assert!(key_store.iter().any(|k| k.kid() == Some("ONUCn80fsiISFWKrVMEiirNVr-QEvi7uQI0QH9q9q4o"))); + }); + + Ok(()) + }); + }) + .await + .unwrap(); + } + + #[tokio::test] + async fn load_config_inline_secrets() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + indoc::indoc! 
{r" + secrets: + encryption: >- + 0000111122223333444455556666777788889999aaaabbbbccccddddeeeeffff + keys: + - kid: lekid0 + key: | + -----BEGIN EC PRIVATE KEY----- + MHcCAQEEIOtZfDuXZr/NC0V3sisR4Chf7RZg6a2dpZesoXMlsPeRoAoGCCqGSM49 + AwEHoUQDQgAECfpqx64lrR85MOhdMxNmIgmz8IfmM5VY9ICX9aoaArnD9FjgkBIl + fGmQWxxXDSWH6SQln9tROVZaduenJqDtDw== + -----END EC PRIVATE KEY----- + - key: | + -----BEGIN EC PRIVATE KEY----- + MHcCAQEEIKlZz/GnH0idVH1PnAF4HQNwRafgBaE2tmyN1wjfdOQqoAoGCCqGSM49 + AwEHoUQDQgAEHrgPeG+Mt8eahih1h4qaPjhl7jT25cdzBkg3dbVks6gBR2Rx4ug9 + h27LAir5RqxByHvua2XsP46rSTChof78uw== + -----END EC PRIVATE KEY----- + "}, + )?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("secrets")?; + + Handle::current().block_on(async move { + assert_eq!( + config.encryption().await.unwrap(), + [ + 0, 0, 17, 17, 34, 34, 51, 51, 68, 68, 85, 85, 102, 102, 119, 119, 136, + 136, 153, 153, 170, 170, 187, 187, 204, 204, 221, 221, 238, 238, 255, + 255 + ] + ); + + let key_store = config.key_store().await.unwrap(); + assert!(key_store.iter().any(|k| k.kid() == Some("lekid0"))); + assert!(key_store.iter().any(|k| k.kid() == Some("ONUCn80fsiISFWKrVMEiirNVr-QEvi7uQI0QH9q9q4o"))); + }); + + Ok(()) + }); + }) + .await + .unwrap(); + } + + #[tokio::test] + async fn load_config_mixed_key_sources() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + indoc::indoc! {r" + secrets: + encryption_file: encryption + keys_dir: keys + keys: + - kid: lekid0 + key: | + -----BEGIN EC PRIVATE KEY----- + MHcCAQEEIOtZfDuXZr/NC0V3sisR4Chf7RZg6a2dpZesoXMlsPeRoAoGCCqGSM49 + AwEHoUQDQgAECfpqx64lrR85MOhdMxNmIgmz8IfmM5VY9ICX9aoaArnD9FjgkBIl + fGmQWxxXDSWH6SQln9tROVZaduenJqDtDw== + -----END EC PRIVATE KEY----- + "}, + )?; + jail.create_dir("keys")?; + jail.create_file( + "keys/key_from_file", + indoc::indoc! 
{r" + -----BEGIN EC PRIVATE KEY----- + MHcCAQEEIKlZz/GnH0idVH1PnAF4HQNwRafgBaE2tmyN1wjfdOQqoAoGCCqGSM49 + AwEHoUQDQgAEHrgPeG+Mt8eahih1h4qaPjhl7jT25cdzBkg3dbVks6gBR2Rx4ug9 + h27LAir5RqxByHvua2XsP46rSTChof78uw== + -----END EC PRIVATE KEY----- + "}, + )?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("secrets")?; + + Handle::current().block_on(async move { + let key_config = config.key_configs().await.unwrap(); + let key_store = config.key_store().await.unwrap(); + + assert!(key_config[0].kid.is_none()); + assert!(matches!(&key_config[0].key, Key::File(p) if p == "keys/key_from_file")); + assert!(key_store.iter().any(|k| k.kid() == Some("ONUCn80fsiISFWKrVMEiirNVr-QEvi7uQI0QH9q9q4o"))); + assert!(key_store.iter().any(|k| k.kid() == Some("lekid0"))); + }); + + Ok(()) + }); + }) + .await + .unwrap(); + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/telemetry.rs b/matrix-authentication-service/crates/config/src/sections/telemetry.rs new file mode 100644 index 00000000..9d9308fd --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/telemetry.rs @@ -0,0 +1,221 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de::Error as _}; +use serde_with::skip_serializing_none; +use url::Url; + +use super::ConfigurationSection; + +/// Propagation format for incoming and outgoing requests +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "lowercase")] +pub enum Propagator { + /// Propagate according to the W3C Trace Context specification + TraceContext, + + /// Propagate according to the W3C Baggage specification + Baggage, + + /// Propagate trace context with Jaeger compatible headers + Jaeger, +} + +#[allow(clippy::unnecessary_wraps)] +fn otlp_endpoint_default() -> Option { + Some("https://localhost:4318".to_owned()) +} + +/// Exporter to use when exporting traces +#[skip_serializing_none] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "lowercase")] +pub enum TracingExporterKind { + /// Don't export traces + #[default] + None, + + /// Export traces to the standard output. Only useful for debugging + Stdout, + + /// Export traces to an OpenTelemetry protocol compatible endpoint + Otlp, +} + +/// Configuration related to exporting traces +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +pub struct TracingConfig { + /// Exporter to use when exporting traces + #[serde(default)] + pub exporter: TracingExporterKind, + + /// OTLP exporter: OTLP over HTTP compatible endpoint + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(url, default = "otlp_endpoint_default")] + pub endpoint: Option, + + /// List of propagation formats to use for incoming and outgoing requests + #[serde(default)] + pub propagators: Vec, + + /// Sample rate for traces + /// + /// Defaults to `1.0` if not set. 
+ #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(example = 0.5, range(min = 0.0, max = 1.0))] + pub sample_rate: Option, +} + +impl TracingConfig { + /// Returns true if all fields are at their default values + fn is_default(&self) -> bool { + matches!(self.exporter, TracingExporterKind::None) + && self.endpoint.is_none() + && self.propagators.is_empty() + } +} + +/// Exporter to use when exporting metrics +#[skip_serializing_none] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "lowercase")] +pub enum MetricsExporterKind { + /// Don't export metrics + #[default] + None, + + /// Export metrics to stdout. Only useful for debugging + Stdout, + + /// Export metrics to an OpenTelemetry protocol compatible endpoint + Otlp, + + /// Export metrics via Prometheus. An HTTP listener with the `prometheus` + /// resource must be setup to expose the Promethes metrics. + Prometheus, +} + +/// Configuration related to exporting metrics +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +pub struct MetricsConfig { + /// Exporter to use when exporting metrics + #[serde(default)] + pub exporter: MetricsExporterKind, + + /// OTLP exporter: OTLP over HTTP compatible endpoint + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(url, default = "otlp_endpoint_default")] + pub endpoint: Option, +} + +impl MetricsConfig { + /// Returns true if all fields are at their default values + fn is_default(&self) -> bool { + matches!(self.exporter, MetricsExporterKind::None) && self.endpoint.is_none() + } +} + +/// Configuration related to the Sentry integration +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +pub struct SentryConfig { + /// Sentry DSN + #[schemars(url, example = &"https://public@host:port/1")] + #[serde(skip_serializing_if = "Option::is_none")] + pub dsn: Option, + + /// Environment to use when sending events to Sentry + /// + /// Defaults to `production` if not 
set. + #[schemars(example = &"production")] + #[serde(skip_serializing_if = "Option::is_none")] + pub environment: Option, + + /// Sample rate for event submissions + /// + /// Defaults to `1.0` if not set. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(example = 0.5, range(min = 0.0, max = 1.0))] + pub sample_rate: Option, + + /// Sample rate for tracing transactions + /// + /// Defaults to `0.0` if not set. + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(example = 0.5, range(min = 0.0, max = 1.0))] + pub traces_sample_rate: Option, +} + +impl SentryConfig { + /// Returns true if all fields are at their default values + fn is_default(&self) -> bool { + self.dsn.is_none() + } +} + +/// Configuration related to sending monitoring data +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +pub struct TelemetryConfig { + /// Configuration related to exporting traces + #[serde(default, skip_serializing_if = "TracingConfig::is_default")] + pub tracing: TracingConfig, + + /// Configuration related to exporting metrics + #[serde(default, skip_serializing_if = "MetricsConfig::is_default")] + pub metrics: MetricsConfig, + + /// Configuration related to the Sentry integration + #[serde(default, skip_serializing_if = "SentryConfig::is_default")] + pub sentry: SentryConfig, +} + +impl TelemetryConfig { + /// Returns true if all fields are at their default values + pub(crate) fn is_default(&self) -> bool { + self.tracing.is_default() && self.metrics.is_default() && self.sentry.is_default() + } +} + +impl ConfigurationSection for TelemetryConfig { + const PATH: Option<&'static str> = Some("telemetry"); + + fn validate( + &self, + _figment: &figment::Figment, + ) -> Result<(), Box> { + if let Some(sample_rate) = self.sentry.sample_rate + && !(0.0..=1.0).contains(&sample_rate) + { + return Err(figment::error::Error::custom( + "Sentry sample rate must be between 0.0 and 1.0", + ) + .with_path("sentry.sample_rate") + .into()); + } + 
+ if let Some(sample_rate) = self.sentry.traces_sample_rate + && !(0.0..=1.0).contains(&sample_rate) + { + return Err(figment::error::Error::custom( + "Sentry sample rate must be between 0.0 and 1.0", + ) + .with_path("sentry.traces_sample_rate") + .into()); + } + + if let Some(sample_rate) = self.tracing.sample_rate + && !(0.0..=1.0).contains(&sample_rate) + { + return Err(figment::error::Error::custom( + "Tracing sample rate must be between 0.0 and 1.0", + ) + .with_path("tracing.sample_rate") + .into()); + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/config/src/sections/templates.rs b/matrix-authentication-service/crates/config/src/sections/templates.rs new file mode 100644 index 00000000..5656de44 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/templates.rs @@ -0,0 +1,116 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use camino::Utf8PathBuf; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use super::ConfigurationSection; + +#[cfg(not(any(feature = "docker", feature = "dist")))] +fn default_path() -> Utf8PathBuf { + "./templates/".into() +} + +#[cfg(feature = "docker")] +fn default_path() -> Utf8PathBuf { + "/usr/local/share/mas-cli/templates/".into() +} + +#[cfg(feature = "dist")] +fn default_path() -> Utf8PathBuf { + "./share/templates/".into() +} + +fn is_default_path(value: &Utf8PathBuf) -> bool { + *value == default_path() +} + +#[cfg(not(any(feature = "docker", feature = "dist")))] +fn default_assets_path() -> Utf8PathBuf { + "./frontend/dist/manifest.json".into() +} + +#[cfg(feature = "docker")] +fn default_assets_path() -> Utf8PathBuf { + "/usr/local/share/mas-cli/manifest.json".into() +} + +#[cfg(feature = "dist")] +fn default_assets_path() -> Utf8PathBuf { + "./share/manifest.json".into() +} + +fn is_default_assets_path(value: &Utf8PathBuf) -> bool { + *value == default_assets_path() +} + +#[cfg(not(any(feature = "docker", feature = "dist")))] +fn default_translations_path() -> Utf8PathBuf { + "./translations/".into() +} + +#[cfg(feature = "docker")] +fn default_translations_path() -> Utf8PathBuf { + "/usr/local/share/mas-cli/translations/".into() +} + +#[cfg(feature = "dist")] +fn default_translations_path() -> Utf8PathBuf { + "./share/translations/".into() +} + +fn is_default_translations_path(value: &Utf8PathBuf) -> bool { + *value == default_translations_path() +} + +/// Configuration related to templates +#[derive(Debug, Serialize, Deserialize, JsonSchema, Clone)] +pub struct TemplatesConfig { + /// Path to the folder which holds the templates + #[serde(default = "default_path", skip_serializing_if = "is_default_path")] + #[schemars(with = "Option")] + pub path: Utf8PathBuf, + + /// Path to the assets manifest + #[serde( + default = "default_assets_path", + skip_serializing_if = "is_default_assets_path" + )] + #[schemars(with = "Option")] + 
pub assets_manifest: Utf8PathBuf, + + /// Path to the translations + #[serde( + default = "default_translations_path", + skip_serializing_if = "is_default_translations_path" + )] + #[schemars(with = "Option")] + pub translations_path: Utf8PathBuf, +} + +impl Default for TemplatesConfig { + fn default() -> Self { + Self { + path: default_path(), + assets_manifest: default_assets_path(), + translations_path: default_translations_path(), + } + } +} + +impl TemplatesConfig { + /// Returns true if all fields are at their default values + pub(crate) fn is_default(&self) -> bool { + is_default_path(&self.path) + && is_default_assets_path(&self.assets_manifest) + && is_default_translations_path(&self.translations_path) + } +} + +impl ConfigurationSection for TemplatesConfig { + const PATH: Option<&'static str> = Some("templates"); +} diff --git a/matrix-authentication-service/crates/config/src/sections/upstream_oauth2.rs b/matrix-authentication-service/crates/config/src/sections/upstream_oauth2.rs new file mode 100644 index 00000000..40591b00 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/sections/upstream_oauth2.rs @@ -0,0 +1,803 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::BTreeMap; + +use camino::Utf8PathBuf; +use mas_iana::jose::JsonWebSignatureAlg; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize, de::Error}; +use serde_with::{serde_as, skip_serializing_none}; +use ulid::Ulid; +use url::Url; + +use crate::{ClientSecret, ClientSecretRaw, ConfigurationSection}; + +/// Upstream OAuth 2.0 providers configuration +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)] +pub struct UpstreamOAuth2Config { + /// List of OAuth 2.0 providers + pub providers: Vec, +} + +impl UpstreamOAuth2Config { + /// Returns true if the configuration is the default one + pub(crate) fn is_default(&self) -> bool { + self.providers.is_empty() + } +} + +impl ConfigurationSection for UpstreamOAuth2Config { + const PATH: Option<&'static str> = Some("upstream_oauth2"); + + fn validate( + &self, + figment: &figment::Figment, + ) -> Result<(), Box> { + for (index, provider) in self.providers.iter().enumerate() { + let annotate = |mut error: figment::Error| { + error.metadata = figment + .find_metadata(&format!("{root}.providers", root = Self::PATH.unwrap())) + .cloned(); + error.profile = Some(figment::Profile::Default); + error.path = vec![ + Self::PATH.unwrap().to_owned(), + "providers".to_owned(), + index.to_string(), + ]; + error + }; + + if !matches!(provider.discovery_mode, DiscoveryMode::Disabled) + && provider.issuer.is_none() + { + return Err(annotate(figment::Error::custom( + "The `issuer` field is required when discovery is enabled", + )) + .into()); + } + + match provider.token_endpoint_auth_method { + TokenAuthMethod::None + | TokenAuthMethod::PrivateKeyJwt + | TokenAuthMethod::SignInWithApple => { + if provider.client_secret.is_some() { + return Err(annotate(figment::Error::custom( + "Unexpected field `client_secret` for the selected authentication method", + )).into()); + } + } + TokenAuthMethod::ClientSecretBasic + | TokenAuthMethod::ClientSecretPost + | TokenAuthMethod::ClientSecretJwt => { + if 
provider.client_secret.is_none() { + return Err(annotate(figment::Error::missing_field("client_secret")).into()); + } + } + } + + match provider.token_endpoint_auth_method { + TokenAuthMethod::None + | TokenAuthMethod::ClientSecretBasic + | TokenAuthMethod::ClientSecretPost + | TokenAuthMethod::SignInWithApple => { + if provider.token_endpoint_auth_signing_alg.is_some() { + return Err(annotate(figment::Error::custom( + "Unexpected field `token_endpoint_auth_signing_alg` for the selected authentication method", + )).into()); + } + } + TokenAuthMethod::ClientSecretJwt | TokenAuthMethod::PrivateKeyJwt => { + if provider.token_endpoint_auth_signing_alg.is_none() { + return Err(annotate(figment::Error::missing_field( + "token_endpoint_auth_signing_alg", + )) + .into()); + } + } + } + + match provider.token_endpoint_auth_method { + TokenAuthMethod::SignInWithApple => { + if provider.sign_in_with_apple.is_none() { + return Err( + annotate(figment::Error::missing_field("sign_in_with_apple")).into(), + ); + } + } + + _ => { + if provider.sign_in_with_apple.is_some() { + return Err(annotate(figment::Error::custom( + "Unexpected field `sign_in_with_apple` for the selected authentication method", + )).into()); + } + } + } + + if provider.claims_imports.skip_confirmation { + if provider.claims_imports.localpart.action != ImportAction::Require { + return Err(annotate(figment::Error::custom( + "The field `action` must be `require` when `skip_confirmation` is set to `true`", + )).with_path("claims_imports.localpart").into()); + } + + if provider.claims_imports.email.action == ImportAction::Suggest { + return Err(annotate(figment::Error::custom( + "The field `action` must not be `suggest` when `skip_confirmation` is set to `true`", + )).with_path("claims_imports.email").into()); + } + + if provider.claims_imports.displayname.action == ImportAction::Suggest { + return Err(annotate(figment::Error::custom( + "The field `action` must not be `suggest` when `skip_confirmation` is set to 
`true`", + )).with_path("claims_imports.displayname").into()); + } + } + + if matches!( + provider.claims_imports.localpart.on_conflict, + OnConflict::Add | OnConflict::Replace | OnConflict::Set + ) && !matches!( + provider.claims_imports.localpart.action, + ImportAction::Force | ImportAction::Require + ) { + return Err(annotate(figment::Error::custom( + "The field `action` must be either `force` or `require` when `on_conflict` is set to `add`, `replace` or `set`", + )).with_path("claims_imports.localpart").into()); + } + } + + Ok(()) + } +} + +/// The response mode we ask the provider to use for the callback +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum ResponseMode { + /// `query`: The provider will send the response as a query string in the + /// URL search parameters + Query, + + /// `form_post`: The provider will send the response as a POST request with + /// the response parameters in the request body + /// + /// + FormPost, +} + +/// Authentication methods used against the OAuth 2.0 provider +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum TokenAuthMethod { + /// `none`: No authentication + None, + + /// `client_secret_basic`: `client_id` and `client_secret` used as basic + /// authorization credentials + ClientSecretBasic, + + /// `client_secret_post`: `client_id` and `client_secret` sent in the + /// request body + ClientSecretPost, + + /// `client_secret_jwt`: a `client_assertion` sent in the request body and + /// signed using the `client_secret` + ClientSecretJwt, + + /// `private_key_jwt`: a `client_assertion` sent in the request body and + /// signed by an asymmetric key + PrivateKeyJwt, + + /// `sign_in_with_apple`: a special method for Signin with Apple + SignInWithApple, +} + +/// How to handle a claim +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +#[serde(rename_all = 
"lowercase")] +pub enum ImportAction { + /// Ignore the claim + #[default] + Ignore, + + /// Suggest the claim value, but allow the user to change it + Suggest, + + /// Force the claim value, but don't fail if it is missing + Force, + + /// Force the claim value, and fail if it is missing + Require, +} + +impl ImportAction { + #[allow(clippy::trivially_copy_pass_by_ref)] + const fn is_default(&self) -> bool { + matches!(self, ImportAction::Ignore) + } +} + +/// How to handle an existing localpart claim +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +#[serde(rename_all = "lowercase")] +pub enum OnConflict { + /// Fails the upstream OAuth 2.0 login on conflict + #[default] + Fail, + + /// Adds the upstream OAuth 2.0 identity link, regardless of whether there + /// is an existing link or not + Add, + + /// Replace any existing upstream OAuth 2.0 identity link + Replace, + + /// Adds the upstream OAuth 2.0 identity link *only* if there is no existing + /// link for this provider on the matching user + Set, +} + +impl OnConflict { + #[allow(clippy::trivially_copy_pass_by_ref)] + const fn is_default(&self) -> bool { + matches!(self, OnConflict::Fail) + } +} + +/// What should be done for the subject attribute +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +pub struct SubjectImportPreference { + /// The Jinja2 template to use for the subject attribute + /// + /// If not provided, the default template is `{{ user.sub }}` + #[serde(default, skip_serializing_if = "Option::is_none")] + pub template: Option, +} + +impl SubjectImportPreference { + const fn is_default(&self) -> bool { + self.template.is_none() + } +} + +/// What should be done for the localpart attribute +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +pub struct LocalpartImportPreference { + /// How to handle the attribute + #[serde(default, skip_serializing_if = "ImportAction::is_default")] + 
pub action: ImportAction, + + /// The Jinja2 template to use for the localpart attribute + /// + /// If not provided, the default template is `{{ user.preferred_username }}` + #[serde(default, skip_serializing_if = "Option::is_none")] + pub template: Option, + + /// How to handle conflicts on the claim, default value is `Fail` + #[serde(default, skip_serializing_if = "OnConflict::is_default")] + pub on_conflict: OnConflict, +} + +impl LocalpartImportPreference { + const fn is_default(&self) -> bool { + self.action.is_default() && self.template.is_none() + } +} + +/// What should be done for the displayname attribute +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +pub struct DisplaynameImportPreference { + /// How to handle the attribute + #[serde(default, skip_serializing_if = "ImportAction::is_default")] + pub action: ImportAction, + + /// The Jinja2 template to use for the displayname attribute + /// + /// If not provided, the default template is `{{ user.name }}` + #[serde(default, skip_serializing_if = "Option::is_none")] + pub template: Option, +} + +impl DisplaynameImportPreference { + const fn is_default(&self) -> bool { + self.action.is_default() && self.template.is_none() + } +} + +/// What should be done with the email attribute +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)] +pub struct EmailImportPreference { + /// How to handle the claim + #[serde(default, skip_serializing_if = "ImportAction::is_default")] + pub action: ImportAction, + + /// The Jinja2 template to use for the email address attribute + /// + /// If not provided, the default template is `{{ user.email }}` + #[serde(default, skip_serializing_if = "Option::is_none")] + pub template: Option, +} + +impl EmailImportPreference { + const fn is_default(&self) -> bool { + self.action.is_default() && self.template.is_none() + } +} + +/// What should be done for the account name attribute +#[derive(Debug, Clone, PartialEq, 
Eq, Serialize, Deserialize, Default, JsonSchema)]
pub struct AccountNameImportPreference {
    /// The Jinja2 template to use for the account name. This name is only used
    /// for display purposes.
    ///
    /// If not provided, it will be ignored.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub template: Option<String>,
}

impl AccountNameImportPreference {
    /// Returns true if all fields are at their default values
    const fn is_default(&self) -> bool {
        self.template.is_none()
    }
}

/// How claims should be imported
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default, JsonSchema)]
pub struct ClaimsImports {
    /// How to determine the subject of the user
    #[serde(default, skip_serializing_if = "SubjectImportPreference::is_default")]
    pub subject: SubjectImportPreference,

    /// Whether to skip the interactive screen prompting the user to confirm the
    /// attributes that are being imported. This requires `localpart.action` to
    /// be `require` and other attribute actions to be either `ignore`, `force`
    /// or `require`
    #[serde(default, skip_serializing_if = "std::ops::Not::not")]
    pub skip_confirmation: bool,

    /// Import the localpart of the MXID
    #[serde(default, skip_serializing_if = "LocalpartImportPreference::is_default")]
    pub localpart: LocalpartImportPreference,

    /// Import the displayname of the user.
    #[serde(
        default,
        skip_serializing_if = "DisplaynameImportPreference::is_default"
    )]
    pub displayname: DisplaynameImportPreference,

    /// Import the email address of the user
    #[serde(default, skip_serializing_if = "EmailImportPreference::is_default")]
    pub email: EmailImportPreference,

    /// Set a human-readable name for the upstream account for display purposes
    #[serde(
        default,
        skip_serializing_if = "AccountNameImportPreference::is_default"
    )]
    pub account_name: AccountNameImportPreference,
}

impl ClaimsImports {
    /// Returns true if all fields are at their default values
    const fn is_default(&self) -> bool {
        self.subject.is_default()
            && self.localpart.is_default()
            && !self.skip_confirmation
            && self.displayname.is_default()
            && self.email.is_default()
            && self.account_name.is_default()
    }
}

/// How to discover the provider's configuration
#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum DiscoveryMode {
    /// Use OIDC discovery with strict metadata verification
    #[default]
    Oidc,

    /// Use OIDC discovery with relaxed metadata verification
    Insecure,

    /// Use a static configuration
    Disabled,
}

impl DiscoveryMode {
    /// Returns true if this is the default (`oidc`) mode
    #[allow(clippy::trivially_copy_pass_by_ref)]
    const fn is_default(&self) -> bool {
        matches!(self, DiscoveryMode::Oidc)
    }
}

/// Whether to use proof key for code exchange (PKCE) when requesting and
/// exchanging the token.
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum PkceMethod { + /// Use PKCE if the provider supports it + /// + /// Defaults to no PKCE if provider discovery is disabled + #[default] + Auto, + + /// Always use PKCE with the S256 challenge method + Always, + + /// Never use PKCE + Never, +} + +impl PkceMethod { + #[allow(clippy::trivially_copy_pass_by_ref)] + const fn is_default(&self) -> bool { + matches!(self, PkceMethod::Auto) + } +} + +fn default_true() -> bool { + true +} + +#[allow(clippy::trivially_copy_pass_by_ref)] +fn is_default_true(value: &bool) -> bool { + *value +} + +#[allow(clippy::ref_option)] +fn is_signed_response_alg_default(signed_response_alg: &JsonWebSignatureAlg) -> bool { + *signed_response_alg == signed_response_alg_default() +} + +#[allow(clippy::unnecessary_wraps)] +fn signed_response_alg_default() -> JsonWebSignatureAlg { + JsonWebSignatureAlg::Rs256 +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct SignInWithApple { + /// The private key file used to sign the `id_token` + #[serde(skip_serializing_if = "Option::is_none")] + #[schemars(with = "Option")] + pub private_key_file: Option, + + /// The private key used to sign the `id_token` + #[serde(skip_serializing_if = "Option::is_none")] + pub private_key: Option, + + /// The Team ID of the Apple Developer Portal + pub team_id: String, + + /// The key ID of the Apple Developer Portal + pub key_id: String, +} + +fn default_scope() -> String { + "openid".to_owned() +} + +fn is_default_scope(scope: &str) -> bool { + scope == default_scope() +} + +/// What to do when receiving an OIDC Backchannel logout request. 
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum OnBackchannelLogout { + /// Do nothing + #[default] + DoNothing, + + /// Only log out the MAS 'browser session' started by this OIDC session + LogoutBrowserOnly, + + /// Log out all sessions started by this OIDC session, including MAS + /// 'browser sessions' and client sessions + LogoutAll, +} + +impl OnBackchannelLogout { + #[allow(clippy::trivially_copy_pass_by_ref)] + const fn is_default(&self) -> bool { + matches!(self, OnBackchannelLogout::DoNothing) + } +} + +/// Configuration for one upstream OAuth 2 provider. +#[serde_as] +#[skip_serializing_none] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +pub struct Provider { + /// Whether this provider is enabled. + /// + /// Defaults to `true` + #[serde(default = "default_true", skip_serializing_if = "is_default_true")] + pub enabled: bool, + + /// An internal unique identifier for this provider + #[schemars( + with = "String", + regex(pattern = r"^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$"), + description = "A ULID as per https://github.com/ulid/spec" + )] + pub id: Ulid, + + /// The ID of the provider that was used by Synapse. + /// In order to perform a Synapse-to-MAS migration, this must be specified. + /// + /// ## For providers that used OAuth 2.0 or OpenID Connect in Synapse + /// + /// ### For `oidc_providers`: + /// This should be specified as `oidc-` followed by the ID that was + /// configured as `idp_id` in one of the `oidc_providers` in the Synapse + /// configuration. + /// For example, if Synapse's configuration contained `idp_id: wombat` for + /// this provider, then specify `oidc-wombat` here. + /// + /// ### For `oidc_config` (legacy): + /// Specify `oidc` here. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub synapse_idp_id: Option, + + /// The OIDC issuer URL + /// + /// This is required if OIDC discovery is enabled (which is the default) + #[serde(skip_serializing_if = "Option::is_none")] + pub issuer: Option, + + /// A human-readable name for the provider, that will be shown to users + #[serde(skip_serializing_if = "Option::is_none")] + pub human_name: Option, + + /// A brand identifier used to customise the UI, e.g. `apple`, `google`, + /// `github`, etc. + /// + /// Values supported by the default template are: + /// + /// - `apple` + /// - `google` + /// - `facebook` + /// - `github` + /// - `gitlab` + /// - `twitter` + /// - `discord` + #[serde(skip_serializing_if = "Option::is_none")] + pub brand_name: Option, + + /// The client ID to use when authenticating with the provider + pub client_id: String, + + /// The client secret to use when authenticating with the provider + /// + /// Used by the `client_secret_basic`, `client_secret_post`, and + /// `client_secret_jwt` methods + #[schemars(with = "ClientSecretRaw")] + #[serde_as(as = "serde_with::TryFromInto")] + #[serde(flatten)] + pub client_secret: Option, + + /// The method to authenticate the client with the provider + pub token_endpoint_auth_method: TokenAuthMethod, + + /// Additional parameters for the `sign_in_with_apple` method + #[serde(skip_serializing_if = "Option::is_none")] + pub sign_in_with_apple: Option, + + /// The JWS algorithm to use when authenticating the client with the + /// provider + /// + /// Used by the `client_secret_jwt` and `private_key_jwt` methods + #[serde(skip_serializing_if = "Option::is_none")] + pub token_endpoint_auth_signing_alg: Option, + + /// Expected signature for the JWT payload returned by the token + /// authentication endpoint. + /// + /// Defaults to `RS256`. 
+ #[serde( + default = "signed_response_alg_default", + skip_serializing_if = "is_signed_response_alg_default" + )] + pub id_token_signed_response_alg: JsonWebSignatureAlg, + + /// The scopes to request from the provider + /// + /// Defaults to `openid`. + #[serde(default = "default_scope", skip_serializing_if = "is_default_scope")] + pub scope: String, + + /// How to discover the provider's configuration + /// + /// Defaults to `oidc`, which uses OIDC discovery with strict metadata + /// verification + #[serde(default, skip_serializing_if = "DiscoveryMode::is_default")] + pub discovery_mode: DiscoveryMode, + + /// Whether to use proof key for code exchange (PKCE) when requesting and + /// exchanging the token. + /// + /// Defaults to `auto`, which uses PKCE if the provider supports it. + #[serde(default, skip_serializing_if = "PkceMethod::is_default")] + pub pkce_method: PkceMethod, + + /// Whether to fetch the user profile from the userinfo endpoint, + /// or to rely on the data returned in the `id_token` from the + /// `token_endpoint`. + /// + /// Defaults to `false`. + #[serde(default)] + pub fetch_userinfo: bool, + + /// Expected signature for the JWT payload returned by the userinfo + /// endpoint. + /// + /// If not specified, the response is expected to be an unsigned JSON + /// payload. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub userinfo_signed_response_alg: Option, + + /// The URL to use for the provider's authorization endpoint + /// + /// Defaults to the `authorization_endpoint` provided through discovery + #[serde(skip_serializing_if = "Option::is_none")] + pub authorization_endpoint: Option, + + /// The URL to use for the provider's userinfo endpoint + /// + /// Defaults to the `userinfo_endpoint` provided through discovery + #[serde(skip_serializing_if = "Option::is_none")] + pub userinfo_endpoint: Option, + + /// The URL to use for the provider's token endpoint + /// + /// Defaults to the `token_endpoint` provided through discovery + #[serde(skip_serializing_if = "Option::is_none")] + pub token_endpoint: Option, + + /// The URL to use for getting the provider's public keys + /// + /// Defaults to the `jwks_uri` provided through discovery + #[serde(skip_serializing_if = "Option::is_none")] + pub jwks_uri: Option, + + /// The response mode we ask the provider to use for the callback + #[serde(skip_serializing_if = "Option::is_none")] + pub response_mode: Option, + + /// How claims should be imported from the `id_token` provided by the + /// provider + #[serde(default, skip_serializing_if = "ClaimsImports::is_default")] + pub claims_imports: ClaimsImports, + + /// Additional parameters to include in the authorization request + /// + /// Orders of the keys are not preserved. + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub additional_authorization_parameters: BTreeMap, + + /// Whether the `login_hint` should be forwarded to the provider in the + /// authorization request. + /// + /// Defaults to `false`. + #[serde(default)] + pub forward_login_hint: bool, + + /// What to do when receiving an OIDC Backchannel logout request. + /// + /// Defaults to `do_nothing`. 
+ #[serde(default, skip_serializing_if = "OnBackchannelLogout::is_default")] + pub on_backchannel_logout: OnBackchannelLogout, +} + +impl Provider { + /// Returns the client secret. + /// + /// If `client_secret_file` was given, the secret is read from that file. + /// + /// # Errors + /// + /// Returns an error when the client secret could not be read from file. + pub async fn client_secret(&self) -> anyhow::Result> { + Ok(match &self.client_secret { + Some(client_secret) => Some(client_secret.value().await?), + None => None, + }) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use figment::{ + Figment, Jail, + providers::{Format, Yaml}, + }; + use tokio::{runtime::Handle, task}; + + use super::*; + + #[tokio::test] + async fn load_config() { + task::spawn_blocking(|| { + Jail::expect_with(|jail| { + jail.create_file( + "config.yaml", + r#" + upstream_oauth2: + providers: + - id: 01GFWR28C4KNE04WG3HKXB7C9R + client_id: upstream-oauth2 + token_endpoint_auth_method: none + + - id: 01GFWR32NCQ12B8Z0J8CPXRRB6 + client_id: upstream-oauth2 + client_secret_file: secret + token_endpoint_auth_method: client_secret_basic + + - id: 01GFWR3WHR93Y5HK389H28VHZ9 + client_id: upstream-oauth2 + client_secret: c1!3n753c237 + token_endpoint_auth_method: client_secret_post + + - id: 01GFWR43R2ZZ8HX9CVBNW9TJWG + client_id: upstream-oauth2 + client_secret_file: secret + token_endpoint_auth_method: client_secret_jwt + + - id: 01GFWR4BNFDCC4QDG6AMSP1VRR + client_id: upstream-oauth2 + token_endpoint_auth_method: private_key_jwt + jwks: + keys: + - kid: "03e84aed4ef4431014e8617567864c4efaaaede9" + kty: "RSA" + alg: "RS256" + use: "sig" + e: "AQAB" + n: 
"ma2uRyBeSEOatGuDpCiV9oIxlDWix_KypDYuhQfEzqi_BiF4fV266OWfyjcABbam59aJMNvOnKW3u_eZM-PhMCBij5MZ-vcBJ4GfxDJeKSn-GP_dJ09rpDcILh8HaWAnPmMoi4DC0nrfE241wPISvZaaZnGHkOrfN_EnA5DligLgVUbrA5rJhQ1aSEQO_gf1raEOW3DZ_ACU3qhtgO0ZBG3a5h7BPiRs2sXqb2UCmBBgwyvYLDebnpE7AotF6_xBIlR-Cykdap3GHVMXhrIpvU195HF30ZoBU4dMd-AeG6HgRt4Cqy1moGoDgMQfbmQ48Hlunv9_Vi2e2CLvYECcBw" + + - kid: "d01c1abe249269f72ef7ca2613a86c9f05e59567" + kty: "RSA" + alg: "RS256" + use: "sig" + e: "AQAB" + n: "0hukqytPwrj1RbMYhYoepCi3CN5k7DwYkTe_Cmb7cP9_qv4ok78KdvFXt5AnQxCRwBD7-qTNkkfMWO2RxUMBdQD0ED6tsSb1n5dp0XY8dSWiBDCX8f6Hr-KolOpvMLZKRy01HdAWcM6RoL9ikbjYHUEW1C8IJnw3MzVHkpKFDL354aptdNLaAdTCBvKzU9WpXo10g-5ctzSlWWjQuecLMQ4G1mNdsR1LHhUENEnOvgT8cDkX0fJzLbEbyBYkdMgKggyVPEB1bg6evG4fTKawgnf0IDSPxIU-wdS9wdSP9ZCJJPLi5CEp-6t6rE_sb2dGcnzjCGlembC57VwpkUvyMw" + "#, + )?; + jail.create_file("secret", r"c1!3n753c237")?; + + let config = Figment::new() + .merge(Yaml::file("config.yaml")) + .extract_inner::("upstream_oauth2")?; + + assert_eq!(config.providers.len(), 5); + + assert_eq!( + config.providers[1].id, + Ulid::from_str("01GFWR32NCQ12B8Z0J8CPXRRB6").unwrap() + ); + + assert!(config.providers[0].client_secret.is_none()); + assert!(matches!(config.providers[1].client_secret, Some(ClientSecret::File(ref p)) if p == "secret")); + assert!(matches!(config.providers[2].client_secret, Some(ClientSecret::Value(ref v)) if v == "c1!3n753c237")); + assert!(matches!(config.providers[3].client_secret, Some(ClientSecret::File(ref p)) if p == "secret")); + assert!(config.providers[4].client_secret.is_none()); + + Handle::current().block_on(async move { + assert_eq!(config.providers[1].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + assert_eq!(config.providers[2].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + assert_eq!(config.providers[3].client_secret().await.unwrap().unwrap(), "c1!3n753c237"); + }); + + Ok(()) + }); + }).await.unwrap(); + } +} diff --git a/matrix-authentication-service/crates/config/src/util.rs 
b/matrix-authentication-service/crates/config/src/util.rs new file mode 100644 index 00000000..d6cf58c3 --- /dev/null +++ b/matrix-authentication-service/crates/config/src/util.rs @@ -0,0 +1,76 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use figment::Figment; +use serde::de::DeserializeOwned; + +/// Trait implemented by all configuration section to help loading specific part +/// of the config and generate the sample config. +pub trait ConfigurationSection: Sized + DeserializeOwned { + /// Specify where this section should live relative to the root. + const PATH: Option<&'static str> = None; + + /// Validate the configuration section + /// + /// # Errors + /// + /// Returns an error if the configuration is invalid + fn validate( + &self, + _figment: &Figment, + ) -> Result<(), Box> { + Ok(()) + } + + /// Extract configuration from a Figment instance. + /// + /// # Errors + /// + /// Returns an error if the configuration could not be loaded + fn extract( + figment: &Figment, + ) -> Result> { + let this: Self = if let Some(path) = Self::PATH { + figment.extract_inner(path)? + } else { + figment.extract()? + }; + + this.validate(figment)?; + Ok(this) + } +} + +/// Extension trait for [`ConfigurationSection`] to allow extracting the +/// configuration section from a [`Figment`] or return the default value if the +/// section is not present. +pub trait ConfigurationSectionExt: ConfigurationSection + Default { + /// Extract the configuration section from the given [`Figment`], or return + /// the default value if the section is not present. + /// + /// # Errors + /// + /// Returns an error if the configuration section is invalid. 
+ fn extract_or_default( + figment: &Figment, + ) -> Result> { + let this: Self = if let Some(path) = Self::PATH { + // If the configuration section is not present, we return the default value + if !figment.contains(path) { + return Ok(Self::default()); + } + + figment.extract_inner(path)? + } else { + figment.extract()? + }; + + this.validate(figment)?; + Ok(this) + } +} + +impl ConfigurationSectionExt for T {} diff --git a/matrix-authentication-service/crates/context/Cargo.toml b/matrix-authentication-service/crates/context/Cargo.toml new file mode 100644 index 00000000..ad86d142 --- /dev/null +++ b/matrix-authentication-service/crates/context/Cargo.toml @@ -0,0 +1,29 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-context" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +console.workspace = true +pin-project-lite.workspace = true +quanta.workspace = true +tokio.workspace = true +tower-service.workspace = true +tower-layer.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +tracing-opentelemetry.workspace = true +opentelemetry.workspace = true diff --git a/matrix-authentication-service/crates/context/src/fmt.rs b/matrix-authentication-service/crates/context/src/fmt.rs new file mode 100644 index 00000000..47e72fca --- /dev/null +++ b/matrix-authentication-service/crates/context/src/fmt.rs @@ -0,0 +1,143 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use console::{Color, Style}; +use opentelemetry::TraceId; +use tracing::{Level, Subscriber}; +use tracing_opentelemetry::OtelData; +use tracing_subscriber::{ + fmt::{ + FormatEvent, FormatFields, + format::{DefaultFields, Writer}, + time::{FormatTime, SystemTime}, + }, + registry::LookupSpan, +}; + +use crate::LogContext; + +/// An event formatter usable by the [`tracing_subscriber`] crate, which +/// includes the log context and the OTEL trace ID. +#[derive(Debug, Default)] +pub struct EventFormatter; + +struct FmtLevel<'a> { + level: &'a Level, + ansi: bool, +} + +impl<'a> FmtLevel<'a> { + pub(crate) fn new(level: &'a Level, ansi: bool) -> Self { + Self { level, ansi } + } +} + +const TRACE_STR: &str = "TRACE"; +const DEBUG_STR: &str = "DEBUG"; +const INFO_STR: &str = " INFO"; +const WARN_STR: &str = " WARN"; +const ERROR_STR: &str = "ERROR"; + +const TRACE_STYLE: Style = Style::new().fg(Color::Magenta); +const DEBUG_STYLE: Style = Style::new().fg(Color::Blue); +const INFO_STYLE: Style = Style::new().fg(Color::Green); +const WARN_STYLE: Style = Style::new().fg(Color::Yellow); +const ERROR_STYLE: Style = Style::new().fg(Color::Red); + +impl std::fmt::Display for FmtLevel<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let msg = match *self.level { + Level::TRACE => TRACE_STYLE.force_styling(self.ansi).apply_to(TRACE_STR), + Level::DEBUG => DEBUG_STYLE.force_styling(self.ansi).apply_to(DEBUG_STR), + Level::INFO => INFO_STYLE.force_styling(self.ansi).apply_to(INFO_STR), + Level::WARN => WARN_STYLE.force_styling(self.ansi).apply_to(WARN_STR), + Level::ERROR => ERROR_STYLE.force_styling(self.ansi).apply_to(ERROR_STR), + }; + write!(f, "{msg}") + } +} + +struct TargetFmt<'a> { + target: &'a str, + line: Option, +} + +impl<'a> TargetFmt<'a> { + pub(crate) fn new(metadata: &tracing::Metadata<'a>) -> Self { + Self { + target: metadata.target(), + line: metadata.line(), + } + } +} + +impl std::fmt::Display for TargetFmt<'_> { + fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.target)?; + if let Some(line) = self.line { + write!(f, ":{line}")?; + } + Ok(()) + } +} + +impl FormatEvent for EventFormatter +where + S: Subscriber + for<'a> LookupSpan<'a>, + N: for<'writer> FormatFields<'writer> + 'static, +{ + fn format_event( + &self, + ctx: &tracing_subscriber::fmt::FmtContext<'_, S, N>, + mut writer: Writer<'_>, + event: &tracing::Event<'_>, + ) -> std::fmt::Result { + let ansi = writer.has_ansi_escapes(); + let metadata = event.metadata(); + + SystemTime.format_time(&mut writer)?; + + let level = FmtLevel::new(metadata.level(), ansi); + write!(&mut writer, " {level} ")?; + + // If there is no explicit 'name' set in the event macro, it will have the + // 'event (unknown):{line}' value. In this case, we want to display the target: + // the module from where it was emitted. In other cases, we want to + // display the explicit name of the event we have set. + let style = Style::new().dim().force_styling(ansi); + if metadata.name().starts_with("event ") { + write!(&mut writer, "{} ", style.apply_to(TargetFmt::new(metadata)))?; + } else { + write!(&mut writer, "{} ", style.apply_to(metadata.name()))?; + } + + LogContext::maybe_with(|log_context| { + let log_context = Style::new() + .bold() + .force_styling(ansi) + .apply_to(log_context); + write!(&mut writer, "{log_context} - ") + }) + .transpose()?; + + let field_fromatter = DefaultFields::new(); + field_fromatter.format_fields(writer.by_ref(), event)?; + + // If we have an OTEL span, we can add the trace ID to the end of the log line + if let Some(span) = ctx.lookup_current() + && let Some(otel) = span.extensions().get::() + && let Some(trace_id) = otel.trace_id() + && trace_id != TraceId::INVALID + { + let label = Style::new() + .italic() + .force_styling(ansi) + .apply_to("trace.id"); + write!(&mut writer, " {label}={trace_id}")?; + } + + writeln!(&mut writer) + } +} diff --git
a/matrix-authentication-service/crates/context/src/future.rs b/matrix-authentication-service/crates/context/src/future.rs new file mode 100644 index 00000000..67c77fe6 --- /dev/null +++ b/matrix-authentication-service/crates/context/src/future.rs @@ -0,0 +1,59 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + pin::Pin, + sync::atomic::Ordering, + task::{Context, Poll}, +}; + +use quanta::Instant; +use tokio::task::futures::TaskLocalFuture; + +use crate::LogContext; + +pub type LogContextFuture = TaskLocalFuture>; + +impl LogContext { + /// Wrap a future with the given log context + pub(crate) fn wrap_future(&self, future: F) -> LogContextFuture { + let future = PollRecordingFuture::new(future); + crate::CURRENT_LOG_CONTEXT.scope(self.clone(), future) + } +} + +pin_project_lite::pin_project! { + /// A future which records the elapsed time and the number of polls in the + /// active log context + pub struct PollRecordingFuture { + #[pin] + inner: F, + } +} + +impl PollRecordingFuture { + pub(crate) fn new(inner: F) -> Self { + Self { inner } + } +} + +impl Future for PollRecordingFuture { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let start = Instant::now(); + let this = self.project(); + let result = this.inner.poll(cx); + + // Record the number of polls and the time we spent polling the future + let elapsed = start.elapsed().as_nanos().try_into().unwrap_or(u64::MAX); + let _ = crate::CURRENT_LOG_CONTEXT.try_with(|c| { + c.inner.polls.fetch_add(1, Ordering::Relaxed); + c.inner.cpu_time.fetch_add(elapsed, Ordering::Relaxed); + }); + + result + } +} diff --git a/matrix-authentication-service/crates/context/src/layer.rs b/matrix-authentication-service/crates/context/src/layer.rs new file mode 100644 index 00000000..eb3f92bf --- /dev/null +++ 
b/matrix-authentication-service/crates/context/src/layer.rs @@ -0,0 +1,41 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::borrow::Cow; + +use tower_layer::Layer; +use tower_service::Service; + +use crate::LogContextService; + +/// A layer which creates a log context for each request. +pub struct LogContextLayer { + tagger: fn(&R) -> Cow<'static, str>, +} + +impl Clone for LogContextLayer { + fn clone(&self) -> Self { + Self { + tagger: self.tagger, + } + } +} + +impl LogContextLayer { + pub fn new(tagger: fn(&R) -> Cow<'static, str>) -> Self { + Self { tagger } + } +} + +impl Layer for LogContextLayer +where + S: Service, +{ + type Service = LogContextService; + + fn layer(&self, inner: S) -> Self::Service { + LogContextService::new(inner, self.tagger) + } +} diff --git a/matrix-authentication-service/crates/context/src/lib.rs b/matrix-authentication-service/crates/context/src/lib.rs new file mode 100644 index 00000000..c9644282 --- /dev/null +++ b/matrix-authentication-service/crates/context/src/lib.rs @@ -0,0 +1,152 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod fmt; +mod future; +mod layer; +mod service; + +use std::{ + borrow::Cow, + sync::{ + Arc, + atomic::{AtomicU64, Ordering}, + }, + time::Duration, +}; + +use quanta::Instant; +use tokio::task_local; + +pub use self::{ + fmt::EventFormatter, + future::{LogContextFuture, PollRecordingFuture}, + layer::LogContextLayer, + service::LogContextService, +}; + +/// A counter which increments each time we create a new log context +/// It will wrap around if we create more than [`u64::MAX`] contexts +static LOG_CONTEXT_INDEX: AtomicU64 = AtomicU64::new(0); +task_local! 
{ + pub static CURRENT_LOG_CONTEXT: LogContext; +} + +/// A log context saves information about the current task, such as the +/// elapsed time, the number of polls, and the poll time. +#[derive(Clone)] +pub struct LogContext { + inner: Arc, +} + +struct LogContextInner { + /// A user-defined tag for the log context + tag: Cow<'static, str>, + + /// A unique index for the log context + index: u64, + + /// The time when the context was created + start: Instant, + + /// The number of [`Future::poll`] recorded + polls: AtomicU64, + + /// An approximation of the total CPU time spent in the context, in + /// nanoseconds + cpu_time: AtomicU64, +} + +impl LogContext { + /// Create a new log context with the given tag + pub fn new(tag: impl Into>) -> Self { + let tag = tag.into(); + let inner = LogContextInner { + tag, + index: LOG_CONTEXT_INDEX.fetch_add(1, Ordering::Relaxed), + start: Instant::now(), + polls: AtomicU64::new(0), + cpu_time: AtomicU64::new(0), + }; + + Self { + inner: Arc::new(inner), + } + } + + /// Run a closure with the current log context, if any + pub fn maybe_with(f: F) -> Option + where + F: FnOnce(&Self) -> R, + { + CURRENT_LOG_CONTEXT.try_with(f).ok() + } + + /// Run the async function `f` with the given log context. It will wrap the + /// output future to record poll and CPU statistics. + pub fn run Fut, Fut: Future>(&self, f: F) -> LogContextFuture { + let future = self.run_sync(f); + self.wrap_future(future) + } + + /// Run the sync function `f` with the given log context, recording the CPU + /// time spent.
+ pub fn run_sync R, R>(&self, f: F) -> R { + let start = Instant::now(); + let result = CURRENT_LOG_CONTEXT.sync_scope(self.clone(), f); + let elapsed = start.elapsed().as_nanos().try_into().unwrap_or(u64::MAX); + self.inner.cpu_time.fetch_add(elapsed, Ordering::Relaxed); + result + } + + /// Create a snapshot of the log context statistics + #[must_use] + pub fn stats(&self) -> LogContextStats { + let polls = self.inner.polls.load(Ordering::Relaxed); + let cpu_time = self.inner.cpu_time.load(Ordering::Relaxed); + let cpu_time = Duration::from_nanos(cpu_time); + let elapsed = self.inner.start.elapsed(); + LogContextStats { + polls, + cpu_time, + elapsed, + } + } +} + +impl std::fmt::Display for LogContext { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let tag = &self.inner.tag; + let index = self.inner.index; + write!(f, "{tag}-{index}") + } +} + +/// A snapshot of a log context statistics +#[derive(Debug, Clone, Copy)] +pub struct LogContextStats { + /// How many times the context was polled + pub polls: u64, + + /// The approximate CPU time spent in the context + pub cpu_time: Duration, + + /// How much time elapsed since the context was created + pub elapsed: Duration, +} + +impl std::fmt::Display for LogContextStats { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let polls = self.polls; + #[expect(clippy::cast_precision_loss)] + let cpu_time_ms = self.cpu_time.as_nanos() as f64 / 1_000_000.; + #[expect(clippy::cast_precision_loss)] + let elapsed_ms = self.elapsed.as_nanos() as f64 / 1_000_000.; + write!( + f, + "polls: {polls}, cpu: {cpu_time_ms:.1}ms, elapsed: {elapsed_ms:.1}ms", + ) + } +} diff --git a/matrix-authentication-service/crates/context/src/service.rs b/matrix-authentication-service/crates/context/src/service.rs new file mode 100644 index 00000000..8d875cc0 --- /dev/null +++ b/matrix-authentication-service/crates/context/src/service.rs @@ -0,0 +1,54 @@ +// Copyright 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + borrow::Cow, + task::{Context, Poll}, +}; + +use tower_service::Service; + +use crate::{LogContext, LogContextFuture}; + +/// A service which wraps another service and creates a log context for +/// each request. +pub struct LogContextService { + inner: S, + tagger: fn(&R) -> Cow<'static, str>, +} + +impl Clone for LogContextService { + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + tagger: self.tagger, + } + } +} + +impl LogContextService { + pub fn new(inner: S, tagger: fn(&R) -> Cow<'static, str>) -> Self { + Self { inner, tagger } + } +} + +impl Service for LogContextService +where + S: Service, +{ + type Response = S::Response; + type Error = S::Error; + type Future = LogContextFuture; + + fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, req: R) -> Self::Future { + let tag = (self.tagger)(&req); + let log_context = LogContext::new(tag); + log_context.run(|| self.inner.call(req)) + } +} diff --git a/matrix-authentication-service/crates/data-model/Cargo.toml b/matrix-authentication-service/crates/data-model/Cargo.toml new file mode 100644 index 00000000..da7021b1 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/Cargo.toml @@ -0,0 +1,38 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-data-model" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +base64ct.workspace = true +chrono.workspace = true +thiserror.workspace = true +serde.workspace = true +serde_json.workspace = true +url.workspace = true +crc.workspace = true +ulid.workspace = true +rand.workspace = true +rand_chacha.workspace = true +regex.workspace = true +woothee.workspace = true +ruma-common.workspace = true +lettre.workspace = true + +mas-iana.workspace = true +mas-jose.workspace = true +oauth2-types.workspace = true + diff --git a/matrix-authentication-service/crates/data-model/examples/ua-parser.rs b/matrix-authentication-service/crates/data-model/examples/ua-parser.rs new file mode 100644 index 00000000..98a4d789 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/examples/ua-parser.rs @@ -0,0 +1,18 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_data_model::UserAgent; + +/// Simple command-line tool to try out user-agent parsing +/// +/// It parses user-agents from stdin and prints the parsed user-agent to stdout. +fn main() { + for line in std::io::stdin().lines() { + let user_agent = line.unwrap(); + let user_agent = UserAgent::parse(user_agent); + println!("{user_agent:?}"); + } +} diff --git a/matrix-authentication-service/crates/data-model/src/clock.rs b/matrix-authentication-service/crates/data-model/src/clock.rs new file mode 100644 index 00000000..bf31835f --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/clock.rs @@ -0,0 +1,119 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A [`Clock`] is a way to get the current date and time. +//! +//! This module defines two implementations of the [`Clock`] trait: +//! [`SystemClock`] which uses the system time, and a [`MockClock`], which can +//! be used and freely manipulated in tests. + +use std::sync::{Arc, atomic::AtomicI64}; + +use chrono::{DateTime, TimeZone, Utc}; + +/// Represents a clock which can give the current date and time +pub trait Clock: Send + Sync { + /// Get the current date and time + fn now(&self) -> DateTime; +} + +impl Clock for Arc { + fn now(&self) -> DateTime { + (**self).now() + } +} + +impl Clock for Box { + fn now(&self) -> DateTime { + (**self).now() + } +} + +/// A clock which uses the system time +#[derive(Clone, Default)] +pub struct SystemClock { + _private: (), +} + +impl Clock for SystemClock { + fn now(&self) -> DateTime { + // This is the clock used elsewhere, it's fine to call Utc::now here + #[allow(clippy::disallowed_methods)] + Utc::now() + } +} + +/// A fake clock, which uses a fixed timestamp, and can be advanced with the +/// [`MockClock::advance`] method.
+pub struct MockClock { + timestamp: AtomicI64, +} + +impl Default for MockClock { + fn default() -> Self { + let datetime = Utc.with_ymd_and_hms(2022, 1, 16, 14, 40, 0).unwrap(); + Self::new(datetime) + } +} + +impl MockClock { + /// Create a new clock which starts at the given datetime + #[must_use] + pub fn new(datetime: DateTime) -> Self { + let timestamp = AtomicI64::new(datetime.timestamp()); + Self { timestamp } + } + + /// Move the clock forward by the given amount of time + pub fn advance(&self, duration: chrono::Duration) { + self.timestamp + .fetch_add(duration.num_seconds(), std::sync::atomic::Ordering::Relaxed); + } +} + +impl Clock for MockClock { + fn now(&self) -> DateTime { + let timestamp = self.timestamp.load(std::sync::atomic::Ordering::Relaxed); + chrono::TimeZone::timestamp_opt(&Utc, timestamp, 0).unwrap() + } +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + + use super::*; + + #[test] + fn test_mocked_clock() { + let clock = MockClock::default(); + + // Time should be frozen, and give out the same timestamp on each call + let first = clock.now(); + std::thread::sleep(std::time::Duration::from_millis(10)); + let second = clock.now(); + + assert_eq!(first, second); + + // Clock can be advanced by a fixed duration + clock.advance(Duration::microseconds(10 * 1000 * 1000)); + let third = clock.now(); + assert_eq!(first + Duration::microseconds(10 * 1000 * 1000), third); + } + + #[test] + fn test_real_clock() { + let clock = SystemClock::default(); + + // Time should not be frozen + let first = clock.now(); + std::thread::sleep(std::time::Duration::from_millis(10)); + let second = clock.now(); + + assert_ne!(first, second); + assert!(first < second); + } +} diff --git a/matrix-authentication-service/crates/data-model/src/compat/device.rs b/matrix-authentication-service/crates/data-model/src/compat/device.rs new file mode 100644 index 00000000..e275b740 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/compat/device.rs 
@@ -0,0 +1,119 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use oauth2_types::scope::ScopeToken; +use rand::{ + RngCore, + distributions::{Alphanumeric, DistString}, +}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +static GENERATED_DEVICE_ID_LENGTH: usize = 10; +static UNSTABLE_DEVICE_SCOPE_PREFIX: &str = "urn:matrix:org.matrix.msc2967.client:device:"; +static STABLE_DEVICE_SCOPE_PREFIX: &str = "urn:matrix:client:device:"; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(transparent)] +pub struct Device { + id: String, +} + +#[derive(Debug, Error)] +pub enum ToScopeTokenError { + #[error("Device ID contains characters that can't be encoded in a scope")] + InvalidCharacters, +} + +impl Device { + /// Get the corresponding stable and unstable [`ScopeToken`] for that device + /// + /// # Errors + /// + /// Returns an error if the device ID contains characters that can't be + /// encoded in a scope + pub fn to_scope_token(&self) -> Result<[ScopeToken; 2], ToScopeTokenError> { + Ok([ + format!("{STABLE_DEVICE_SCOPE_PREFIX}{}", self.id) + .parse() + .map_err(|_| ToScopeTokenError::InvalidCharacters)?, + format!("{UNSTABLE_DEVICE_SCOPE_PREFIX}{}", self.id) + .parse() + .map_err(|_| ToScopeTokenError::InvalidCharacters)?, + ]) + } + + /// Get the corresponding [`Device`] from a [`ScopeToken`] + /// + /// Returns `None` if the [`ScopeToken`] is not a device scope + #[must_use] + pub fn from_scope_token(token: &ScopeToken) -> Option { + let stable = token.as_str().strip_prefix(STABLE_DEVICE_SCOPE_PREFIX); + let unstable = token.as_str().strip_prefix(UNSTABLE_DEVICE_SCOPE_PREFIX); + let id = stable.or(unstable)?; + Some(Device::from(id.to_owned())) + } + + /// Generate a random device ID + pub fn generate(rng: &mut R) -> Self 
{ + let id: String = Alphanumeric.sample_string(rng, GENERATED_DEVICE_ID_LENGTH); + Self { id } + } + + /// Get the inner device ID as [`&str`] + #[must_use] + pub fn as_str(&self) -> &str { + &self.id + } +} + +impl From for Device { + fn from(id: String) -> Self { + Self { id } + } +} + +impl From for String { + fn from(device: Device) -> Self { + device.id + } +} + +impl std::fmt::Display for Device { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.id) + } +} + +#[cfg(test)] +mod test { + use oauth2_types::scope::OPENID; + + use crate::Device; + + #[test] + fn test_device_id_to_from_scope_token() { + let device = Device::from("AABBCCDDEE".to_owned()); + let [stable_scope_token, unstable_scope_token] = device.to_scope_token().unwrap(); + assert_eq!( + stable_scope_token.as_str(), + "urn:matrix:client:device:AABBCCDDEE" + ); + assert_eq!( + unstable_scope_token.as_str(), + "urn:matrix:org.matrix.msc2967.client:device:AABBCCDDEE" + ); + assert_eq!( + Device::from_scope_token(&unstable_scope_token).as_ref(), + Some(&device) + ); + assert_eq!( + Device::from_scope_token(&stable_scope_token).as_ref(), + Some(&device) + ); + assert_eq!(Device::from_scope_token(&OPENID), None); + } +} diff --git a/matrix-authentication-service/crates/data-model/src/compat/mod.rs b/matrix-authentication-service/crates/data-model/src/compat/mod.rs new file mode 100644 index 00000000..be38154f --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/compat/mod.rs @@ -0,0 +1,108 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use chrono::{DateTime, Utc}; +use ulid::Ulid; + +mod device; +mod session; +mod sso_login; + +pub use self::{ + device::{Device, ToScopeTokenError}, + session::{CompatSession, CompatSessionState}, + sso_login::{CompatSsoLogin, CompatSsoLoginState}, +}; +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CompatAccessToken { + pub id: Ulid, + pub session_id: Ulid, + pub token: String, + pub created_at: DateTime, + pub expires_at: Option>, +} + +impl CompatAccessToken { + #[must_use] + pub fn is_valid(&self, now: DateTime) -> bool { + if let Some(expires_at) = self.expires_at { + expires_at > now + } else { + true + } + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum CompatRefreshTokenState { + #[default] + Valid, + Consumed { + consumed_at: DateTime, + }, +} + +impl CompatRefreshTokenState { + /// Returns `true` if the compat refresh token state is [`Valid`]. + /// + /// [`Valid`]: CompatRefreshTokenState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns `true` if the compat refresh token state is [`Consumed`]. + /// + /// [`Consumed`]: CompatRefreshTokenState::Consumed + #[must_use] + pub fn is_consumed(&self) -> bool { + matches!(self, Self::Consumed { .. }) + } + + /// Consume the refresh token, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the refresh token is already consumed. + pub fn consume(self, consumed_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Consumed { consumed_at }), + Self::Consumed { .. 
} => Err(InvalidTransitionError), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CompatRefreshToken { + pub id: Ulid, + pub state: CompatRefreshTokenState, + pub session_id: Ulid, + pub access_token_id: Ulid, + pub token: String, + pub created_at: DateTime, +} + +impl std::ops::Deref for CompatRefreshToken { + type Target = CompatRefreshTokenState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl CompatRefreshToken { + /// Consume the refresh token and return the consumed token. + /// + /// # Errors + /// + /// Returns an error if the refresh token is already consumed. + pub fn consume(mut self, consumed_at: DateTime) -> Result { + self.state = self.state.consume(consumed_at)?; + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/compat/session.rs b/matrix-authentication-service/crates/data-model/src/compat/session.rs new file mode 100644 index 00000000..24a8e18a --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/compat/session.rs @@ -0,0 +1,106 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use serde::Serialize; +use ulid::Ulid; + +use super::Device; +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize)] +pub enum CompatSessionState { + #[default] + Valid, + Finished { + finished_at: DateTime, + }, +} + +impl CompatSessionState { + /// Returns `true` if the compat session state is [`Valid`]. + /// + /// [`Valid`]: CompatSessionState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns `true` if the compat session state is [`Finished`]. 
+ /// + /// [`Finished`]: CompatSessionState::Finished + #[must_use] + pub fn is_finished(&self) -> bool { + matches!(self, Self::Finished { .. }) + } + + /// Transitions the session state to [`Finished`]. + /// + /// # Parameters + /// + /// * `finished_at` - The time at which the session was finished. + /// + /// # Errors + /// + /// Returns an error if the session state is already [`Finished`]. + /// + /// [`Finished`]: CompatSessionState::Finished + pub fn finish(self, finished_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Finished { finished_at }), + Self::Finished { .. } => Err(InvalidTransitionError), + } + } + + #[must_use] + pub fn finished_at(&self) -> Option> { + match self { + CompatSessionState::Valid => None, + CompatSessionState::Finished { finished_at } => Some(*finished_at), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct CompatSession { + pub id: Ulid, + pub state: CompatSessionState, + pub user_id: Ulid, + pub device: Option, + pub human_name: Option, + pub user_session_id: Option, + pub created_at: DateTime, + pub is_synapse_admin: bool, + pub user_agent: Option, + pub last_active_at: Option>, + pub last_active_ip: Option, +} + +impl std::ops::Deref for CompatSession { + type Target = CompatSessionState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl CompatSession { + /// Marks the session as finished. + /// + /// # Parameters + /// + /// * `finished_at` - The time at which the session was finished. + /// + /// # Errors + /// + /// Returns an error if the session is already finished. 
+ pub fn finish(mut self, finished_at: DateTime) -> Result { + self.state = self.state.finish(finished_at)?; + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/compat/sso_login.rs b/matrix-authentication-service/crates/data-model/src/compat/sso_login.rs new file mode 100644 index 00000000..a42dfcb1 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/compat/sso_login.rs @@ -0,0 +1,204 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::{DateTime, Utc}; +use serde::Serialize; +use ulid::Ulid; +use url::Url; + +use super::CompatSession; +use crate::{BrowserSession, InvalidTransitionError}; + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize)] +pub enum CompatSsoLoginState { + #[default] + Pending, + Fulfilled { + fulfilled_at: DateTime, + browser_session_id: Ulid, + }, + Exchanged { + fulfilled_at: DateTime, + exchanged_at: DateTime, + compat_session_id: Ulid, + }, +} + +impl CompatSsoLoginState { + /// Returns `true` if the compat SSO login state is [`Pending`]. + /// + /// [`Pending`]: CompatSsoLoginState::Pending + #[must_use] + pub fn is_pending(&self) -> bool { + matches!(self, Self::Pending) + } + + /// Returns `true` if the compat SSO login state is [`Fulfilled`]. + /// + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + #[must_use] + pub fn is_fulfilled(&self) -> bool { + matches!(self, Self::Fulfilled { .. }) + } + + /// Returns `true` if the compat SSO login state is [`Exchanged`]. + /// + /// [`Exchanged`]: CompatSsoLoginState::Exchanged + #[must_use] + pub fn is_exchanged(&self) -> bool { + matches!(self, Self::Exchanged { .. }) + } + + /// Get the time at which the login was fulfilled. + /// + /// Returns `None` if the compat SSO login state is [`Pending`]. 
+ /// + /// [`Pending`]: CompatSsoLoginState::Pending + #[must_use] + pub fn fulfilled_at(&self) -> Option> { + match self { + Self::Pending => None, + Self::Fulfilled { fulfilled_at, .. } | Self::Exchanged { fulfilled_at, .. } => { + Some(*fulfilled_at) + } + } + } + + /// Get the time at which the login was exchanged. + /// + /// Returns `None` if the compat SSO login state is not [`Exchanged`]. + /// + /// [`Exchanged`]: CompatSsoLoginState::Exchanged + #[must_use] + pub fn exchanged_at(&self) -> Option> { + match self { + Self::Pending | Self::Fulfilled { .. } => None, + Self::Exchanged { exchanged_at, .. } => Some(*exchanged_at), + } + } + + /// Get the compat session ID associated with the login. + /// + /// Returns `None` if the compat SSO login state is [`Pending`] or + /// [`Fulfilled`]. + /// + /// [`Pending`]: CompatSsoLoginState::Pending + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + #[must_use] + pub fn session_id(&self) -> Option { + match self { + Self::Pending | Self::Fulfilled { .. } => None, + Self::Exchanged { + compat_session_id: session_id, + .. + } => Some(*session_id), + } + } + + /// Transition the compat SSO login state from [`Pending`] to [`Fulfilled`]. + /// + /// # Errors + /// + /// Returns an error if the compat SSO login state is not [`Pending`]. + /// + /// [`Pending`]: CompatSsoLoginState::Pending + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + pub fn fulfill( + self, + fulfilled_at: DateTime, + browser_session: &BrowserSession, + ) -> Result { + match self { + Self::Pending => Ok(Self::Fulfilled { + fulfilled_at, + browser_session_id: browser_session.id, + }), + Self::Fulfilled { .. } | Self::Exchanged { .. } => Err(InvalidTransitionError), + } + } + + /// Transition the compat SSO login state from [`Fulfilled`] to + /// [`Exchanged`]. + /// + /// # Errors + /// + /// Returns an error if the compat SSO login state is not [`Fulfilled`]. 
+ /// + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + /// [`Exchanged`]: CompatSsoLoginState::Exchanged + pub fn exchange( + self, + exchanged_at: DateTime, + compat_session: &CompatSession, + ) -> Result { + match self { + Self::Fulfilled { + fulfilled_at, + browser_session_id: _, + } => Ok(Self::Exchanged { + fulfilled_at, + exchanged_at, + compat_session_id: compat_session.id, + }), + Self::Pending { .. } | Self::Exchanged { .. } => Err(InvalidTransitionError), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct CompatSsoLogin { + pub id: Ulid, + pub redirect_uri: Url, + pub login_token: String, + pub created_at: DateTime, + pub state: CompatSsoLoginState, +} + +impl std::ops::Deref for CompatSsoLogin { + type Target = CompatSsoLoginState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl CompatSsoLogin { + /// Transition the compat SSO login from a [`Pending`] state to + /// [`Fulfilled`]. + /// + /// # Errors + /// + /// Returns an error if the compat SSO login state is not [`Pending`]. + /// + /// [`Pending`]: CompatSsoLoginState::Pending + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + pub fn fulfill( + mut self, + fulfilled_at: DateTime, + browser_session: &BrowserSession, + ) -> Result { + self.state = self.state.fulfill(fulfilled_at, browser_session)?; + Ok(self) + } + + /// Transition the compat SSO login from a [`Fulfilled`] state to + /// [`Exchanged`]. + /// + /// # Errors + /// + /// Returns an error if the compat SSO login state is not [`Fulfilled`]. 
+ /// + /// [`Fulfilled`]: CompatSsoLoginState::Fulfilled + /// [`Exchanged`]: CompatSsoLoginState::Exchanged + pub fn exchange( + mut self, + exchanged_at: DateTime, + compat_session: &CompatSession, + ) -> Result { + self.state = self.state.exchange(exchanged_at, compat_session)?; + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/lib.rs b/matrix-authentication-service/crates/data-model/src/lib.rs new file mode 100644 index 00000000..05b2466b --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/lib.rs @@ -0,0 +1,65 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::module_name_repetitions)] + +use thiserror::Error; + +pub mod clock; +pub(crate) mod compat; +pub mod oauth2; +pub mod personal; +pub(crate) mod policy_data; +mod site_config; +pub(crate) mod tokens; +pub(crate) mod upstream_oauth2; +pub(crate) mod user_agent; +pub(crate) mod users; +mod utils; +mod version; + +/// Error when an invalid state transition is attempted. 
+#[derive(Debug, Error)] +#[error("invalid state transition")] +pub struct InvalidTransitionError; + +pub use ulid::Ulid; + +pub use self::{ + clock::{Clock, SystemClock}, + compat::{ + CompatAccessToken, CompatRefreshToken, CompatRefreshTokenState, CompatSession, + CompatSessionState, CompatSsoLogin, CompatSsoLoginState, Device, ToScopeTokenError, + }, + oauth2::{ + AuthorizationCode, AuthorizationGrant, AuthorizationGrantStage, Client, DeviceCodeGrant, + DeviceCodeGrantState, InvalidRedirectUriError, JwksOrJwksUri, Pkce, Session, SessionState, + }, + policy_data::PolicyData, + site_config::{ + CaptchaConfig, CaptchaService, SessionExpirationConfig, SessionLimitConfig, SiteConfig, + }, + tokens::{ + AccessToken, AccessTokenState, RefreshToken, RefreshTokenState, TokenFormatError, TokenType, + }, + upstream_oauth2::{ + UpstreamOAuthAuthorizationSession, UpstreamOAuthAuthorizationSessionState, + UpstreamOAuthLink, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, + UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderImportAction, + UpstreamOAuthProviderImportPreference, UpstreamOAuthProviderLocalpartPreference, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderOnConflict, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderResponseMode, + UpstreamOAuthProviderSubjectPreference, UpstreamOAuthProviderTokenAuthMethod, + }, + user_agent::{DeviceType, UserAgent}, + users::{ + Authentication, AuthenticationMethod, BrowserSession, MatrixUser, Password, User, + UserEmail, UserEmailAuthentication, UserEmailAuthenticationCode, UserRecoverySession, + UserRecoveryTicket, UserRegistration, UserRegistrationPassword, UserRegistrationToken, + }, + utils::{BoxClock, BoxRng}, + version::AppVersion, +}; diff --git a/matrix-authentication-service/crates/data-model/src/oauth2/authorization_grant.rs b/matrix-authentication-service/crates/data-model/src/oauth2/authorization_grant.rs new file mode 100644 index 00000000..738277b8 --- /dev/null +++ 
b/matrix-authentication-service/crates/data-model/src/oauth2/authorization_grant.rs @@ -0,0 +1,366 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::str::FromStr as _; + +use chrono::{DateTime, Utc}; +use mas_iana::oauth::PkceCodeChallengeMethod; +use oauth2_types::{ + pkce::{CodeChallengeError, CodeChallengeMethodExt}, + requests::ResponseMode, + scope::{OPENID, PROFILE, Scope}, +}; +use rand::{ + RngCore, + distributions::{Alphanumeric, DistString}, +}; +use ruma_common::UserId; +use serde::Serialize; +use ulid::Ulid; +use url::Url; + +use super::session::Session; +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct Pkce { + pub challenge_method: PkceCodeChallengeMethod, + pub challenge: String, +} + +impl Pkce { + /// Create a new PKCE challenge, with the given method and challenge. + #[must_use] + pub fn new(challenge_method: PkceCodeChallengeMethod, challenge: String) -> Self { + Pkce { + challenge_method, + challenge, + } + } + + /// Verify the PKCE challenge. + /// + /// # Errors + /// + /// Returns an error if the verifier is invalid. 
+ pub fn verify(&self, verifier: &str) -> Result<(), CodeChallengeError> { + self.challenge_method.verify(&self.challenge, verifier) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct AuthorizationCode { + pub code: String, + pub pkce: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Default)] +#[serde(tag = "stage", rename_all = "lowercase")] +pub enum AuthorizationGrantStage { + #[default] + Pending, + Fulfilled { + session_id: Ulid, + fulfilled_at: DateTime, + }, + Exchanged { + session_id: Ulid, + fulfilled_at: DateTime, + exchanged_at: DateTime, + }, + Cancelled { + cancelled_at: DateTime, + }, +} + +impl AuthorizationGrantStage { + #[must_use] + pub fn new() -> Self { + Self::Pending + } + + fn fulfill( + self, + fulfilled_at: DateTime, + session: &Session, + ) -> Result { + match self { + Self::Pending => Ok(Self::Fulfilled { + fulfilled_at, + session_id: session.id, + }), + _ => Err(InvalidTransitionError), + } + } + + fn exchange(self, exchanged_at: DateTime) -> Result { + match self { + Self::Fulfilled { + fulfilled_at, + session_id, + } => Ok(Self::Exchanged { + fulfilled_at, + exchanged_at, + session_id, + }), + _ => Err(InvalidTransitionError), + } + } + + fn cancel(self, cancelled_at: DateTime) -> Result { + match self { + Self::Pending => Ok(Self::Cancelled { cancelled_at }), + _ => Err(InvalidTransitionError), + } + } + + /// Returns `true` if the authorization grant stage is [`Pending`]. + /// + /// [`Pending`]: AuthorizationGrantStage::Pending + #[must_use] + pub fn is_pending(&self) -> bool { + matches!(self, Self::Pending) + } + + /// Returns `true` if the authorization grant stage is [`Fulfilled`]. + /// + /// [`Fulfilled`]: AuthorizationGrantStage::Fulfilled + #[must_use] + pub fn is_fulfilled(&self) -> bool { + matches!(self, Self::Fulfilled { .. }) + } + + /// Returns `true` if the authorization grant stage is [`Exchanged`]. 
+ /// + /// [`Exchanged`]: AuthorizationGrantStage::Exchanged + #[must_use] + pub fn is_exchanged(&self) -> bool { + matches!(self, Self::Exchanged { .. }) + } +} + +pub enum LoginHint<'a> { + MXID(&'a UserId), + Email(lettre::Address), + None, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct AuthorizationGrant { + pub id: Ulid, + #[serde(flatten)] + pub stage: AuthorizationGrantStage, + pub code: Option, + pub client_id: Ulid, + pub redirect_uri: Url, + pub scope: Scope, + pub state: Option, + pub nonce: Option, + pub response_mode: ResponseMode, + pub response_type_id_token: bool, + pub created_at: DateTime, + pub login_hint: Option, + pub locale: Option, +} + +impl std::ops::Deref for AuthorizationGrant { + type Target = AuthorizationGrantStage; + + fn deref(&self) -> &Self::Target { + &self.stage + } +} + +impl AuthorizationGrant { + /// Parse a `login_hint` + /// + /// Returns `LoginHint::MXID` for valid mxid 'mxid:@john.doe:example.com' + /// + /// Returns `LoginHint::Email` for valid email 'john.doe@example.com' + /// + /// Otherwise returns `LoginHint::None` + #[must_use] + pub fn parse_login_hint(&self, homeserver: &str) -> LoginHint<'_> { + let Some(login_hint) = &self.login_hint else { + return LoginHint::None; + }; + + if let Some(value) = login_hint.strip_prefix("mxid:") + && let Ok(mxid) = <&UserId>::try_from(value) + && mxid.server_name() == homeserver + { + LoginHint::MXID(mxid) + } else if let Ok(email) = lettre::Address::from_str(login_hint) { + LoginHint::Email(email) + } else { + LoginHint::None + } + } + + /// Mark the authorization grant as exchanged. + /// + /// # Errors + /// + /// Returns an error if the authorization grant is not [`Fulfilled`]. + /// + /// [`Fulfilled`]: AuthorizationGrantStage::Fulfilled + pub fn exchange(mut self, exchanged_at: DateTime) -> Result { + self.stage = self.stage.exchange(exchanged_at)?; + Ok(self) + } + + /// Mark the authorization grant as fulfilled. 
+ /// + /// # Errors + /// + /// Returns an error if the authorization grant is not [`Pending`]. + /// + /// [`Pending`]: AuthorizationGrantStage::Pending + pub fn fulfill( + mut self, + fulfilled_at: DateTime, + session: &Session, + ) -> Result { + self.stage = self.stage.fulfill(fulfilled_at, session)?; + Ok(self) + } + + /// Mark the authorization grant as cancelled. + /// + /// # Errors + /// + /// Returns an error if the authorization grant is not [`Pending`]. + /// + /// [`Pending`]: AuthorizationGrantStage::Pending + /// + /// # TODO + /// + /// This appears to be unused + pub fn cancel(mut self, canceld_at: DateTime) -> Result { + self.stage = self.stage.cancel(canceld_at)?; + Ok(self) + } + + #[doc(hidden)] + pub fn sample(now: DateTime, rng: &mut impl RngCore) -> Self { + Self { + id: Ulid::from_datetime_with_source(now.into(), rng), + stage: AuthorizationGrantStage::Pending, + code: Some(AuthorizationCode { + code: Alphanumeric.sample_string(rng, 10), + pkce: None, + }), + client_id: Ulid::from_datetime_with_source(now.into(), rng), + redirect_uri: Url::parse("http://localhost:8080").unwrap(), + scope: Scope::from_iter([OPENID, PROFILE]), + state: Some(Alphanumeric.sample_string(rng, 10)), + nonce: Some(Alphanumeric.sample_string(rng, 10)), + response_mode: ResponseMode::Query, + response_type_id_token: false, + created_at: now, + login_hint: Some(String::from("mxid:@example-user:example.com")), + locale: Some(String::from("fr")), + } + } +} + +#[cfg(test)] +mod tests { + use rand::SeedableRng; + + use super::*; + use crate::clock::{Clock, MockClock}; + + #[test] + fn no_login_hint() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: None, + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, LoginHint::None)); + } + + #[test] + fn valid_login_hint() { + let now = 
MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: Some(String::from("mxid:@example-user:example.com")), + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, LoginHint::MXID(mxid) if mxid.localpart() == "example-user")); + } + + #[test] + fn valid_login_hint_with_email() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: Some(String::from("example@user")), + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, LoginHint::Email(email) if email.to_string() == "example@user")); + } + + #[test] + fn invalid_login_hint() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: Some(String::from("example-user")), + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, LoginHint::None)); + } + + #[test] + fn valid_login_hint_for_wrong_homeserver() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: Some(String::from("mxid:@example-user:matrix.org")), + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, LoginHint::None)); + } + + #[test] + fn unknown_login_hint_type() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let grant = AuthorizationGrant { + login_hint: Some(String::from("something:anything")), + ..AuthorizationGrant::sample(now, &mut rng) + }; + + let hint = grant.parse_login_hint("example.com"); + + assert!(matches!(hint, 
LoginHint::None)); + } +} diff --git a/matrix-authentication-service/crates/data-model/src/oauth2/client.rs b/matrix-authentication-service/crates/data-model/src/oauth2/client.rs new file mode 100644 index 00000000..ce28445f --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/oauth2/client.rs @@ -0,0 +1,286 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::{DateTime, Utc}; +use mas_iana::{jose::JsonWebSignatureAlg, oauth::OAuthClientAuthenticationMethod}; +use mas_jose::jwk::PublicJsonWebKeySet; +use oauth2_types::{ + oidc::ApplicationType, + registration::{ClientMetadata, Localized}, + requests::GrantType, +}; +use rand::RngCore; +use serde::Serialize; +use thiserror::Error; +use ulid::Ulid; +use url::Url; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum JwksOrJwksUri { + /// Client's JSON Web Key Set document, passed by value. + Jwks(PublicJsonWebKeySet), + + /// URL for the Client's JSON Web Key Set document. + JwksUri(Url), +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct Client { + pub id: Ulid, + + /// Client identifier + pub client_id: String, + + /// Hash of the client metadata + pub metadata_digest: Option, + + pub encrypted_client_secret: Option, + + pub application_type: Option, + + /// Array of Redirection URI values used by the Client + pub redirect_uris: Vec, + + /// Array containing a list of the OAuth 2.0 Grant Types that the Client is + /// declaring that it will restrict itself to using. 
+ pub grant_types: Vec, + + /// Name of the Client to be presented to the End-User + pub client_name: Option, // TODO: translations + + /// URL that references a logo for the Client application + pub logo_uri: Option, // TODO: translations + + /// URL of the home page of the Client + pub client_uri: Option, // TODO: translations + + /// URL that the Relying Party Client provides to the End-User to read about + /// the how the profile data will be used + pub policy_uri: Option, // TODO: translations + + /// URL that the Relying Party Client provides to the End-User to read about + /// the Relying Party's terms of service + pub tos_uri: Option, // TODO: translations + + pub jwks: Option, + + /// JWS alg algorithm REQUIRED for signing the ID Token issued to this + /// Client + pub id_token_signed_response_alg: Option, + + /// JWS alg algorithm REQUIRED for signing `UserInfo` Responses. + pub userinfo_signed_response_alg: Option, + + /// Requested authentication method for the token endpoint + pub token_endpoint_auth_method: Option, + + /// JWS alg algorithm that MUST be used for signing the JWT used to + /// authenticate the Client at the Token Endpoint for the `private_key_jwt` + /// and `client_secret_jwt` authentication methods + pub token_endpoint_auth_signing_alg: Option, + + /// URI using the https scheme that a third party can use to initiate a + /// login by the RP + pub initiate_login_uri: Option, +} + +#[derive(Debug, Error)] +pub enum InvalidRedirectUriError { + #[error("redirect_uri is not allowed for this client")] + NotAllowed, + + #[error("multiple redirect_uris registered for this client")] + MultipleRegistered, + + #[error("client has no redirect_uri registered")] + NoneRegistered, +} + +impl Client { + /// Determine which redirect URI to use for the given request. 
+ /// + /// # Errors + /// + /// Returns an error if: + /// + /// - no URL was given but multiple redirect URIs are registered, + /// - no URL was registered, or + /// - the given URL is not registered + pub fn resolve_redirect_uri<'a>( + &'a self, + redirect_uri: &'a Option, + ) -> Result<&'a Url, InvalidRedirectUriError> { + match (&self.redirect_uris[..], redirect_uri) { + ([], _) => Err(InvalidRedirectUriError::NoneRegistered), + ([one], None) => Ok(one), + (_, None) => Err(InvalidRedirectUriError::MultipleRegistered), + (uris, Some(uri)) if uri_matches_one_of(uri, uris) => Ok(uri), + _ => Err(InvalidRedirectUriError::NotAllowed), + } + } + + /// Create a client metadata object for this client + #[must_use] + pub fn into_metadata(self) -> ClientMetadata { + let (jwks, jwks_uri) = match self.jwks { + Some(JwksOrJwksUri::Jwks(jwks)) => (Some(jwks), None), + Some(JwksOrJwksUri::JwksUri(jwks_uri)) => (None, Some(jwks_uri)), + _ => (None, None), + }; + ClientMetadata { + redirect_uris: Some(self.redirect_uris.clone()), + response_types: None, + grant_types: Some(self.grant_types.clone()), + application_type: self.application_type.clone(), + client_name: self.client_name.map(|n| Localized::new(n, [])), + logo_uri: self.logo_uri.map(|n| Localized::new(n, [])), + client_uri: self.client_uri.map(|n| Localized::new(n, [])), + policy_uri: self.policy_uri.map(|n| Localized::new(n, [])), + tos_uri: self.tos_uri.map(|n| Localized::new(n, [])), + jwks_uri, + jwks, + id_token_signed_response_alg: self.id_token_signed_response_alg, + userinfo_signed_response_alg: self.userinfo_signed_response_alg, + token_endpoint_auth_method: self.token_endpoint_auth_method, + token_endpoint_auth_signing_alg: self.token_endpoint_auth_signing_alg, + initiate_login_uri: self.initiate_login_uri, + contacts: None, + software_id: None, + software_version: None, + sector_identifier_uri: None, + subject_type: None, + id_token_encrypted_response_alg: None, + id_token_encrypted_response_enc: None, + 
userinfo_encrypted_response_alg: None, + userinfo_encrypted_response_enc: None, + request_object_signing_alg: None, + request_object_encryption_alg: None, + request_object_encryption_enc: None, + default_max_age: None, + require_auth_time: None, + default_acr_values: None, + request_uris: None, + require_signed_request_object: None, + require_pushed_authorization_requests: None, + introspection_signed_response_alg: None, + introspection_encrypted_response_alg: None, + introspection_encrypted_response_enc: None, + post_logout_redirect_uris: None, + } + } + + #[doc(hidden)] + pub fn samples(now: DateTime, rng: &mut impl RngCore) -> Vec { + vec![ + // A client with all the URIs set + Self { + id: Ulid::from_datetime_with_source(now.into(), rng), + client_id: "client1".to_owned(), + metadata_digest: None, + encrypted_client_secret: None, + application_type: Some(ApplicationType::Web), + redirect_uris: vec![ + Url::parse("https://client1.example.com/redirect").unwrap(), + Url::parse("https://client1.example.com/redirect2").unwrap(), + ], + grant_types: vec![GrantType::AuthorizationCode, GrantType::RefreshToken], + client_name: Some("Client 1".to_owned()), + client_uri: Some(Url::parse("https://client1.example.com").unwrap()), + logo_uri: Some(Url::parse("https://client1.example.com/logo.png").unwrap()), + tos_uri: Some(Url::parse("https://client1.example.com/tos").unwrap()), + policy_uri: Some(Url::parse("https://client1.example.com/policy").unwrap()), + initiate_login_uri: Some( + Url::parse("https://client1.example.com/initiate-login").unwrap(), + ), + token_endpoint_auth_method: Some(OAuthClientAuthenticationMethod::None), + token_endpoint_auth_signing_alg: None, + id_token_signed_response_alg: None, + userinfo_signed_response_alg: None, + jwks: None, + }, + // Another client without any URIs set + Self { + id: Ulid::from_datetime_with_source(now.into(), rng), + client_id: "client2".to_owned(), + metadata_digest: None, + encrypted_client_secret: None, + 
application_type: Some(ApplicationType::Native), + redirect_uris: vec![Url::parse("https://client2.example.com/redirect").unwrap()], + grant_types: vec![GrantType::AuthorizationCode, GrantType::RefreshToken], + client_name: None, + client_uri: None, + logo_uri: None, + tos_uri: None, + policy_uri: None, + initiate_login_uri: None, + token_endpoint_auth_method: None, + token_endpoint_auth_signing_alg: None, + id_token_signed_response_alg: None, + userinfo_signed_response_alg: None, + jwks: None, + }, + ] + } +} + +/// The hosts that match the loopback interface. +const LOCAL_HOSTS: &[&str] = &["localhost", "127.0.0.1", "[::1]"]; + +/// Whether the given URI matches one of the registered URIs. +/// +/// If the URI host is one if `localhost`, `127.0.0.1` or `[::1]`, any port is +/// accepted. +fn uri_matches_one_of(uri: &Url, registered_uris: &[Url]) -> bool { + if LOCAL_HOSTS.contains(&uri.host_str().unwrap_or_default()) { + let mut uri = uri.clone(); + // Try matching without the port first + if uri.set_port(None).is_ok() && registered_uris.contains(&uri) { + return true; + } + } + + registered_uris.contains(uri) +} + +#[cfg(test)] +mod tests { + use url::Url; + + use super::*; + + #[test] + fn test_uri_matches_one_of() { + let registered_uris = &[ + Url::parse("http://127.0.0.1").unwrap(), + Url::parse("https://example.org").unwrap(), + ]; + + // Non-loopback interface URIs. + assert!(uri_matches_one_of( + &Url::parse("https://example.org").unwrap(), + registered_uris + )); + assert!(!uri_matches_one_of( + &Url::parse("https://example.org:8080").unwrap(), + registered_uris + )); + + // Loopback interface URIS. 
+ assert!(uri_matches_one_of( + &Url::parse("http://127.0.0.1").unwrap(), + registered_uris + )); + assert!(uri_matches_one_of( + &Url::parse("http://127.0.0.1:8080").unwrap(), + registered_uris + )); + assert!(!uri_matches_one_of( + &Url::parse("http://localhost").unwrap(), + registered_uris + )); + } +} diff --git a/matrix-authentication-service/crates/data-model/src/oauth2/device_code_grant.rs b/matrix-authentication-service/crates/data-model/src/oauth2/device_code_grant.rs new file mode 100644 index 00000000..aaf7df59 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/oauth2/device_code_grant.rs @@ -0,0 +1,262 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use oauth2_types::scope::Scope; +use serde::Serialize; +use ulid::Ulid; + +use crate::{BrowserSession, InvalidTransitionError, Session}; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +#[serde(rename_all = "snake_case", tag = "state")] +pub enum DeviceCodeGrantState { + /// The device code grant is pending. + Pending, + + /// The device code grant has been fulfilled by a user. + Fulfilled { + /// The browser session which was used to complete this device code + /// grant. + browser_session_id: Ulid, + + /// The time at which this device code grant was fulfilled. + fulfilled_at: DateTime, + }, + + /// The device code grant has been rejected by a user. + Rejected { + /// The browser session which was used to reject this device code grant. + browser_session_id: Ulid, + + /// The time at which this device code grant was rejected. + rejected_at: DateTime, + }, + + /// The device code grant was exchanged for an access token. + Exchanged { + /// The browser session which was used to exchange this device code + /// grant. 
+ browser_session_id: Ulid, + + /// The time at which the device code grant was fulfilled. + fulfilled_at: DateTime, + + /// The time at which this device code grant was exchanged. + exchanged_at: DateTime, + + /// The OAuth 2.0 session ID which was created by this device code + /// grant. + session_id: Ulid, + }, +} + +impl DeviceCodeGrantState { + /// Mark this device code grant as fulfilled, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Pending`] + /// state. + /// + /// [`Pending`]: DeviceCodeGrantState::Pending + pub fn fulfill( + self, + browser_session: &BrowserSession, + fulfilled_at: DateTime, + ) -> Result { + match self { + DeviceCodeGrantState::Pending => Ok(DeviceCodeGrantState::Fulfilled { + browser_session_id: browser_session.id, + fulfilled_at, + }), + _ => Err(InvalidTransitionError), + } + } + + /// Mark this device code grant as rejected, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Pending`] + /// state. + /// + /// [`Pending`]: DeviceCodeGrantState::Pending + pub fn reject( + self, + browser_session: &BrowserSession, + rejected_at: DateTime, + ) -> Result { + match self { + DeviceCodeGrantState::Pending => Ok(DeviceCodeGrantState::Rejected { + browser_session_id: browser_session.id, + rejected_at, + }), + _ => Err(InvalidTransitionError), + } + } + + /// Mark this device code grant as exchanged, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Fulfilled`] + /// state. + /// + /// [`Fulfilled`]: DeviceCodeGrantState::Fulfilled + pub fn exchange( + self, + session: &Session, + exchanged_at: DateTime, + ) -> Result { + match self { + DeviceCodeGrantState::Fulfilled { + fulfilled_at, + browser_session_id, + .. 
+ } => Ok(DeviceCodeGrantState::Exchanged { + browser_session_id, + fulfilled_at, + exchanged_at, + session_id: session.id, + }), + _ => Err(InvalidTransitionError), + } + } + + /// Returns `true` if the device code grant state is [`Pending`]. + /// + /// [`Pending`]: DeviceCodeGrantState::Pending + #[must_use] + pub fn is_pending(&self) -> bool { + matches!(self, Self::Pending) + } + + /// Returns `true` if the device code grant state is [`Fulfilled`]. + /// + /// [`Fulfilled`]: DeviceCodeGrantState::Fulfilled + #[must_use] + pub fn is_fulfilled(&self) -> bool { + matches!(self, Self::Fulfilled { .. }) + } + + /// Returns `true` if the device code grant state is [`Rejected`]. + /// + /// [`Rejected`]: DeviceCodeGrantState::Rejected + #[must_use] + pub fn is_rejected(&self) -> bool { + matches!(self, Self::Rejected { .. }) + } + + /// Returns `true` if the device code grant state is [`Exchanged`]. + /// + /// [`Exchanged`]: DeviceCodeGrantState::Exchanged + #[must_use] + pub fn is_exchanged(&self) -> bool { + matches!(self, Self::Exchanged { .. }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct DeviceCodeGrant { + pub id: Ulid, + #[serde(flatten)] + pub state: DeviceCodeGrantState, + + /// The client ID which requested this device code grant. + pub client_id: Ulid, + + /// The scope which was requested by this device code grant. + pub scope: Scope, + + /// The user code which was generated for this device code grant. + /// This is the one that the user will enter into their client. + pub user_code: String, + + /// The device code which was generated for this device code grant. + /// This is the one that the client will use to poll for an access token. + pub device_code: String, + + /// The time at which this device code grant was created. + pub created_at: DateTime, + + /// The time at which this device code grant will expire. + pub expires_at: DateTime, + + /// The IP address of the client which requested this device code grant. 
+ pub ip_address: Option, + + /// The user agent used to request this device code grant. + pub user_agent: Option, +} + +impl std::ops::Deref for DeviceCodeGrant { + type Target = DeviceCodeGrantState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl DeviceCodeGrant { + /// Mark this device code grant as fulfilled, returning the updated grant. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Pending`] + /// state. + /// + /// [`Pending`]: DeviceCodeGrantState::Pending + pub fn fulfill( + self, + browser_session: &BrowserSession, + fulfilled_at: DateTime, + ) -> Result { + Ok(Self { + state: self.state.fulfill(browser_session, fulfilled_at)?, + ..self + }) + } + + /// Mark this device code grant as rejected, returning the updated grant. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Pending`] + /// + /// [`Pending`]: DeviceCodeGrantState::Pending + pub fn reject( + self, + browser_session: &BrowserSession, + rejected_at: DateTime, + ) -> Result { + Ok(Self { + state: self.state.reject(browser_session, rejected_at)?, + ..self + }) + } + + /// Mark this device code grant as exchanged, returning the updated grant. + /// + /// # Errors + /// + /// Returns an error if the device code grant is not in the [`Fulfilled`] + /// state. + /// + /// [`Fulfilled`]: DeviceCodeGrantState::Fulfilled + pub fn exchange( + self, + session: &Session, + exchanged_at: DateTime, + ) -> Result { + Ok(Self { + state: self.state.exchange(session, exchanged_at)?, + ..self + }) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/oauth2/mod.rs b/matrix-authentication-service/crates/data-model/src/oauth2/mod.rs new file mode 100644 index 00000000..6221a32f --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/oauth2/mod.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod authorization_grant; +mod client; +mod device_code_grant; +mod session; + +pub use self::{ + authorization_grant::{ + AuthorizationCode, AuthorizationGrant, AuthorizationGrantStage, LoginHint, Pkce, + }, + client::{Client, InvalidRedirectUriError, JwksOrJwksUri}, + device_code_grant::{DeviceCodeGrant, DeviceCodeGrantState}, + session::{Session, SessionState}, +}; diff --git a/matrix-authentication-service/crates/data-model/src/oauth2/session.rs b/matrix-authentication-service/crates/data-model/src/oauth2/session.rs new file mode 100644 index 00000000..c6c9346e --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/oauth2/session.rs @@ -0,0 +1,111 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use oauth2_types::scope::Scope; +use serde::Serialize; +use ulid::Ulid; + +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize)] +pub enum SessionState { + #[default] + Valid, + Finished { + finished_at: DateTime, + }, +} + +impl SessionState { + /// Returns `true` if the session state is [`Valid`]. + /// + /// [`Valid`]: SessionState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns `true` if the session state is [`Finished`]. + /// + /// [`Finished`]: SessionState::Finished + #[must_use] + pub fn is_finished(&self) -> bool { + matches!(self, Self::Finished { .. }) + } + + /// Transitions the session state to [`Finished`]. + /// + /// # Parameters + /// + /// * `finished_at` - The time at which the session was finished. 
+ /// + /// # Errors + /// + /// Returns an error if the session state is already [`Finished`]. + /// + /// [`Finished`]: SessionState::Finished + pub fn finish(self, finished_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Finished { finished_at }), + Self::Finished { .. } => Err(InvalidTransitionError), + } + } + + /// Returns the time the session was finished, if any + /// + /// Returns `None` if the session is still [`Valid`]. + /// + /// [`Valid`]: SessionState::Valid + #[must_use] + pub fn finished_at(&self) -> Option> { + match self { + Self::Valid => None, + Self::Finished { finished_at } => Some(*finished_at), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct Session { + pub id: Ulid, + pub state: SessionState, + pub created_at: DateTime, + pub user_id: Option, + pub user_session_id: Option, + pub client_id: Ulid, + pub scope: Scope, + pub user_agent: Option, + pub last_active_at: Option>, + pub last_active_ip: Option, + pub human_name: Option, +} + +impl std::ops::Deref for Session { + type Target = SessionState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl Session { + /// Marks the session as finished. + /// + /// # Parameters + /// + /// * `finished_at` - The time at which the session was finished. + /// + /// # Errors + /// + /// Returns an error if the session is already finished. + pub fn finish(mut self, finished_at: DateTime) -> Result { + self.state = self.state.finish(finished_at)?; + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/personal/mod.rs b/matrix-authentication-service/crates/data-model/src/personal/mod.rs new file mode 100644 index 00000000..1142fea7 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/personal/mod.rs @@ -0,0 +1,32 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +pub mod session; + +use chrono::{DateTime, Utc}; +use ulid::Ulid; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PersonalAccessToken { + pub id: Ulid, + pub session_id: Ulid, + pub created_at: DateTime, + pub expires_at: Option>, + pub revoked_at: Option>, +} + +impl PersonalAccessToken { + #[must_use] + pub fn is_valid(&self, now: DateTime) -> bool { + if self.revoked_at.is_some() { + return false; + } + if let Some(expires_at) = self.expires_at { + expires_at > now + } else { + true + } + } +} diff --git a/matrix-authentication-service/crates/data-model/src/personal/session.rs b/matrix-authentication-service/crates/data-model/src/personal/session.rs new file mode 100644 index 00000000..f3c8d34f --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/personal/session.rs @@ -0,0 +1,141 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use oauth2_types::scope::Scope; +use serde::Serialize; +use ulid::Ulid; + +use crate::{Client, Device, InvalidTransitionError, User}; + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize)] +pub enum SessionState { + #[default] + Valid, + Revoked { + revoked_at: DateTime, + }, +} + +impl SessionState { + /// Returns `true` if the session state is [`Valid`]. + /// + /// [`Valid`]: SessionState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns `true` if the session state is [`Revoked`]. + /// + /// [`Revoked`]: SessionState::Revoked + #[must_use] + pub fn is_revoked(&self) -> bool { + matches!(self, Self::Revoked { .. }) + } + + /// Transitions the session state to [`Revoked`]. + /// + /// # Parameters + /// + /// * `revoked_at` - The time at which the session was revoked. 
+ /// + /// # Errors + /// + /// Returns an error if the session state is already [`Revoked`]. + /// + /// [`Revoked`]: SessionState::Revoked + pub fn revoke(self, revoked_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Revoked { revoked_at }), + Self::Revoked { .. } => Err(InvalidTransitionError), + } + } + + /// Returns the time the session was revoked, if any + /// + /// Returns `None` if the session is still [`Valid`]. + /// + /// [`Valid`]: SessionState::Valid + #[must_use] + pub fn revoked_at(&self) -> Option> { + match self { + Self::Valid => None, + Self::Revoked { revoked_at } => Some(*revoked_at), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct PersonalSession { + pub id: Ulid, + pub state: SessionState, + pub owner: PersonalSessionOwner, + pub actor_user_id: Ulid, + pub human_name: String, + /// The scope for the session, identical to OAuth 2 sessions. + /// May or may not include a device scope + /// (personal sessions can be deviceless). + pub scope: Scope, + pub created_at: DateTime, + pub last_active_at: Option>, + pub last_active_ip: Option, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize)] +pub enum PersonalSessionOwner { + /// The personal session is owned by the user with the given `user_id`. + User(Ulid), + /// The personal session is owned by the OAuth 2 Client with the given + /// `oauth2_client_id`. + OAuth2Client(Ulid), +} + +impl<'a> From<&'a User> for PersonalSessionOwner { + fn from(value: &'a User) -> Self { + PersonalSessionOwner::User(value.id) + } +} + +impl<'a> From<&'a Client> for PersonalSessionOwner { + fn from(value: &'a Client) -> Self { + PersonalSessionOwner::OAuth2Client(value.id) + } +} + +impl std::ops::Deref for PersonalSession { + type Target = SessionState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl PersonalSession { + /// Marks the session as revoked. 
+ /// + /// # Parameters + /// + /// * `revoked_at` - The time at which the session was finished. + /// + /// # Errors + /// + /// Returns an error if the session is already finished. + pub fn finish(mut self, revoked_at: DateTime) -> Result { + self.state = self.state.revoke(revoked_at)?; + Ok(self) + } + + /// Returns whether the scope of this session contains a device scope; + /// in other words: whether this session has a device. + #[must_use] + pub fn has_device(&self) -> bool { + self.scope + .iter() + .any(|scope_token| Device::from_scope_token(scope_token).is_some()) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/policy_data.rs b/matrix-authentication-service/crates/data-model/src/policy_data.rs new file mode 100644 index 00000000..b732aca8 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/policy_data.rs @@ -0,0 +1,15 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::{DateTime, Utc}; +use serde::Serialize; +use ulid::Ulid; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct PolicyData { + pub id: Ulid, + pub created_at: DateTime, + pub data: serde_json::Value, +} diff --git a/matrix-authentication-service/crates/data-model/src/site_config.rs b/matrix-authentication-service/crates/data-model/src/site_config.rs new file mode 100644 index 00000000..bb92dc3e --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/site_config.rs @@ -0,0 +1,114 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::num::NonZeroU64; + +use chrono::Duration; +use serde::Serialize; +use url::Url; + +/// Which Captcha service is being used +#[derive(Debug, Clone, Copy)] +pub enum CaptchaService { + RecaptchaV2, + CloudflareTurnstile, + HCaptcha, +} + +/// Captcha configuration +#[derive(Debug, Clone)] +pub struct CaptchaConfig { + /// Which Captcha service is being used + pub service: CaptchaService, + + /// The site key used by the instance + pub site_key: String, + + /// The secret key used by the instance + pub secret_key: String, +} + +/// Automatic session expiration configuration +#[derive(Debug, Clone)] +pub struct SessionExpirationConfig { + pub user_session_inactivity_ttl: Option, + pub oauth_session_inactivity_ttl: Option, + pub compat_session_inactivity_ttl: Option, +} + +#[derive(Serialize, Debug, Clone)] +pub struct SessionLimitConfig { + pub soft_limit: NonZeroU64, + pub hard_limit: NonZeroU64, +} + +/// Random site configuration we want accessible in various places. +#[allow(clippy::struct_excessive_bools)] +#[derive(Debug, Clone)] +pub struct SiteConfig { + /// Time-to-live of access tokens. + pub access_token_ttl: Duration, + + /// Time-to-live of compatibility access tokens. + pub compat_token_ttl: Duration, + + /// The server name, e.g. "matrix.org". + pub server_name: String, + + /// The URL to the privacy policy. + pub policy_uri: Option, + + /// The URL to the terms of service. + pub tos_uri: Option, + + /// Imprint to show in the footer. + pub imprint: Option, + + /// Whether password login is enabled. + pub password_login_enabled: bool, + + /// Whether password registration is enabled. + pub password_registration_enabled: bool, + + /// Whether a valid email address is required for password registrations. + pub password_registration_email_required: bool, + + /// Whether registration tokens are required for password registrations. + pub registration_token_required: bool, + + /// Whether users can change their email. 
+ pub email_change_allowed: bool, + + /// Whether users can change their display name. + pub displayname_change_allowed: bool, + + /// Whether users can change their password. + pub password_change_allowed: bool, + + /// Whether users can recover their account via email. + pub account_recovery_allowed: bool, + + /// Whether users can delete their own account. + pub account_deactivation_allowed: bool, + + /// Captcha configuration + pub captcha: Option, + + /// Minimum password complexity, between 0 and 4. + /// This is a score from zxcvbn. + pub minimum_password_complexity: u8, + + pub session_expiration: Option, + + /// Whether users can log in with their email address. + pub login_with_email_allowed: bool, + + /// The iframe URL to show in the plan tab of the UI + pub plan_management_iframe_uri: Option, + + /// Limits on the number of application sessions that each user can have + pub session_limit: Option, +} diff --git a/matrix-authentication-service/crates/data-model/src/tokens.rs b/matrix-authentication-service/crates/data-model/src/tokens.rs new file mode 100644 index 00000000..bd34c500 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/tokens.rs @@ -0,0 +1,492 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use base64ct::{Base64UrlUnpadded, Encoding}; +use chrono::{DateTime, Utc}; +use crc::{CRC_32_ISO_HDLC, Crc}; +use mas_iana::oauth::OAuthTokenTypeHint; +use rand::{Rng, RngCore, distributions::Alphanumeric}; +use thiserror::Error; +use ulid::Ulid; + +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum AccessTokenState { + #[default] + Valid, + Revoked { + revoked_at: DateTime, + }, +} + +impl AccessTokenState { + fn revoke(self, revoked_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Revoked { revoked_at }), + Self::Revoked { .. } => Err(InvalidTransitionError), + } + } + + /// Returns `true` if the refresh token state is [`Valid`]. + /// + /// [`Valid`]: AccessTokenState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns `true` if the refresh token state is [`Revoked`]. + /// + /// [`Revoked`]: AccessTokenState::Revoked + #[must_use] + pub fn is_revoked(&self) -> bool { + matches!(self, Self::Revoked { .. }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AccessToken { + pub id: Ulid, + pub state: AccessTokenState, + pub session_id: Ulid, + pub access_token: String, + pub created_at: DateTime, + pub expires_at: Option>, + pub first_used_at: Option>, +} + +impl AccessToken { + #[must_use] + pub fn jti(&self) -> String { + self.id.to_string() + } + + /// Whether the access token is valid, i.e. not revoked and not expired + /// + /// # Parameters + /// + /// * `now` - The current time + #[must_use] + pub fn is_valid(&self, now: DateTime) -> bool { + self.state.is_valid() && !self.is_expired(now) + } + + /// Whether the access token is expired + /// + /// Always returns `false` if the access token does not have an expiry time. 
+ /// + /// # Parameters + /// + /// * `now` - The current time + #[must_use] + pub fn is_expired(&self, now: DateTime) -> bool { + match self.expires_at { + Some(expires_at) => expires_at < now, + None => false, + } + } + + /// Whether the access token was used at least once + #[must_use] + pub fn is_used(&self) -> bool { + self.first_used_at.is_some() + } + + /// Mark the access token as revoked + /// + /// # Parameters + /// + /// * `revoked_at` - The time at which the access token was revoked + /// + /// # Errors + /// + /// Returns an error if the access token is already revoked + pub fn revoke(mut self, revoked_at: DateTime) -> Result { + self.state = self.state.revoke(revoked_at)?; + Ok(self) + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum RefreshTokenState { + #[default] + Valid, + Consumed { + consumed_at: DateTime, + next_refresh_token_id: Option, + }, + Revoked { + revoked_at: DateTime, + }, +} + +impl RefreshTokenState { + /// Consume the refresh token, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the refresh token is revoked. + fn consume( + self, + consumed_at: DateTime, + replaced_by: &RefreshToken, + ) -> Result { + match self { + Self::Valid | Self::Consumed { .. } => Ok(Self::Consumed { + consumed_at, + next_refresh_token_id: Some(replaced_by.id), + }), + Self::Revoked { .. } => Err(InvalidTransitionError), + } + } + + /// Revoke the refresh token, returning a new state. + /// + /// # Errors + /// + /// Returns an error if the refresh token is already consumed or revoked. + pub fn revoke(self, revoked_at: DateTime) -> Result { + match self { + Self::Valid => Ok(Self::Revoked { revoked_at }), + Self::Consumed { .. } | Self::Revoked { .. } => Err(InvalidTransitionError), + } + } + + /// Returns `true` if the refresh token state is [`Valid`]. 
+ /// + /// [`Valid`]: RefreshTokenState::Valid + #[must_use] + pub fn is_valid(&self) -> bool { + matches!(self, Self::Valid) + } + + /// Returns the next refresh token ID, if any. + #[must_use] + pub fn next_refresh_token_id(&self) -> Option { + match self { + Self::Valid | Self::Revoked { .. } => None, + Self::Consumed { + next_refresh_token_id, + .. + } => *next_refresh_token_id, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RefreshToken { + pub id: Ulid, + pub state: RefreshTokenState, + pub refresh_token: String, + pub session_id: Ulid, + pub created_at: DateTime, + pub access_token_id: Option, +} + +impl std::ops::Deref for RefreshToken { + type Target = RefreshTokenState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl RefreshToken { + #[must_use] + pub fn jti(&self) -> String { + self.id.to_string() + } + + /// Consumes the refresh token and returns the consumed token. + /// + /// # Errors + /// + /// Returns an error if the refresh token is revoked. + pub fn consume( + mut self, + consumed_at: DateTime, + replaced_by: &Self, + ) -> Result { + self.state = self.state.consume(consumed_at, replaced_by)?; + Ok(self) + } + + /// Revokes the refresh token and returns a new revoked token + /// + /// # Errors + /// + /// Returns an error if the refresh token is already revoked. + pub fn revoke(mut self, revoked_at: DateTime) -> Result { + self.state = self.state.revoke(revoked_at)?; + Ok(self) + } +} + +/// Type of token to generate or validate +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TokenType { + /// An access token, used by Relying Parties to authenticate requests + AccessToken, + + /// A refresh token, used by the refresh token grant + RefreshToken, + + /// A legacy access token + CompatAccessToken, + + /// A legacy refresh token + CompatRefreshToken, + + /// A personal access token. 
+ PersonalAccessToken, +} + +impl std::fmt::Display for TokenType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TokenType::AccessToken => write!(f, "access token"), + TokenType::RefreshToken => write!(f, "refresh token"), + TokenType::CompatAccessToken => write!(f, "compat access token"), + TokenType::CompatRefreshToken => write!(f, "compat refresh token"), + TokenType::PersonalAccessToken => write!(f, "personal access token"), + } + } +} + +impl TokenType { + fn prefix(self) -> &'static str { + match self { + TokenType::AccessToken => "mat", + TokenType::RefreshToken => "mar", + TokenType::CompatAccessToken => "mct", + TokenType::CompatRefreshToken => "mcr", + TokenType::PersonalAccessToken => "mpt", + } + } + + fn match_prefix(prefix: &str) -> Option { + match prefix { + "mat" => Some(TokenType::AccessToken), + "mar" => Some(TokenType::RefreshToken), + "mct" | "syt" => Some(TokenType::CompatAccessToken), + "mcr" | "syr" => Some(TokenType::CompatRefreshToken), + "mpt" => Some(TokenType::PersonalAccessToken), + _ => None, + } + } + + /// Generate a token for the given type + pub fn generate(self, rng: &mut (impl RngCore + ?Sized)) -> String { + let random_part: String = rng + .sample_iter(&Alphanumeric) + .take(30) + .map(char::from) + .collect(); + + let base = format!("{prefix}_{random_part}", prefix = self.prefix()); + let crc = CRC.checksum(base.as_bytes()); + let crc = base62_encode(crc); + format!("{base}_{crc}") + } + + /// Check the format of a token and determine its type + /// + /// # Errors + /// + /// Returns an error if the token is not valid + pub fn check(token: &str) -> Result { + // these are legacy tokens imported from Synapse + // we don't do any validation on them and continue as is + if token.starts_with("syt_") || is_likely_synapse_macaroon(token) { + return Ok(TokenType::CompatAccessToken); + } + if token.starts_with("syr_") { + return Ok(TokenType::CompatRefreshToken); + } + + let split: Vec<&str> 
= token.split('_').collect(); + let [prefix, random_part, crc]: [&str; 3] = split + .try_into() + .map_err(|_| TokenFormatError::InvalidFormat)?; + + if prefix.len() != 3 || random_part.len() != 30 || crc.len() != 6 { + return Err(TokenFormatError::InvalidFormat); + } + + let token_type = + TokenType::match_prefix(prefix).ok_or_else(|| TokenFormatError::UnknownPrefix { + prefix: prefix.to_owned(), + })?; + + let base = format!("{prefix}_{random_part}", prefix = token_type.prefix()); + let expected_crc = CRC.checksum(base.as_bytes()); + let expected_crc = base62_encode(expected_crc); + if crc != expected_crc { + return Err(TokenFormatError::InvalidCrc { + expected: expected_crc, + got: crc.to_owned(), + }); + } + + Ok(token_type) + } +} + +impl PartialEq for TokenType { + fn eq(&self, other: &OAuthTokenTypeHint) -> bool { + matches!( + (self, other), + ( + TokenType::AccessToken + | TokenType::CompatAccessToken + | TokenType::PersonalAccessToken, + OAuthTokenTypeHint::AccessToken + ) | ( + TokenType::RefreshToken | TokenType::CompatRefreshToken, + OAuthTokenTypeHint::RefreshToken + ) + ) + } +} + +/// Returns true if and only if a token looks like it may be a macaroon. +/// +/// Macaroons are a standard for tokens that support attenuation. +/// Synapse used them for old sessions and for guest sessions. +/// +/// We won't bother to decode them fully, but we can check to see if the first +/// constraint is the `location` constraint. 
+fn is_likely_synapse_macaroon(token: &str) -> bool { + let Ok(decoded) = Base64UrlUnpadded::decode_vec(token) else { + return false; + }; + decoded.get(4..13) == Some(b"location ") +} + +const NUM: [u8; 62] = *b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; + +fn base62_encode(mut num: u32) -> String { + let mut res = String::with_capacity(6); + while num > 0 { + res.push(NUM[(num % 62) as usize] as char); + num /= 62; + } + + format!("{res:0>6}") +} + +const CRC: Crc = Crc::::new(&CRC_32_ISO_HDLC); + +/// Invalid token +#[derive(Debug, Error, PartialEq, Eq)] +pub enum TokenFormatError { + /// Overall token format is invalid + #[error("invalid token format")] + InvalidFormat, + + /// Token used an unknown prefix + #[error("unknown token prefix {prefix:?}")] + UnknownPrefix { + /// The prefix found in the token + prefix: String, + }, + + /// The CRC checksum in the token is invalid + #[error("invalid crc {got:?}, expected {expected:?}")] + InvalidCrc { + /// The CRC hash expected to be found in the token + expected: String, + /// The CRC found in the token + got: String, + }, +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use rand::thread_rng; + + use super::*; + + #[test] + fn test_prefix_match() { + use TokenType::{AccessToken, CompatAccessToken, CompatRefreshToken, RefreshToken}; + assert_eq!(TokenType::match_prefix("syt"), Some(CompatAccessToken)); + assert_eq!(TokenType::match_prefix("syr"), Some(CompatRefreshToken)); + assert_eq!(TokenType::match_prefix("mct"), Some(CompatAccessToken)); + assert_eq!(TokenType::match_prefix("mcr"), Some(CompatRefreshToken)); + assert_eq!(TokenType::match_prefix("mat"), Some(AccessToken)); + assert_eq!(TokenType::match_prefix("mar"), Some(RefreshToken)); + assert_eq!(TokenType::match_prefix("matt"), None); + assert_eq!(TokenType::match_prefix("marr"), None); + assert_eq!(TokenType::match_prefix("ma"), None); + assert_eq!( + TokenType::match_prefix(TokenType::CompatAccessToken.prefix()), 
+ Some(TokenType::CompatAccessToken) + ); + assert_eq!( + TokenType::match_prefix(TokenType::CompatRefreshToken.prefix()), + Some(TokenType::CompatRefreshToken) + ); + assert_eq!( + TokenType::match_prefix(TokenType::AccessToken.prefix()), + Some(TokenType::AccessToken) + ); + assert_eq!( + TokenType::match_prefix(TokenType::RefreshToken.prefix()), + Some(TokenType::RefreshToken) + ); + } + + #[test] + fn test_is_likely_synapse_macaroon() { + // This is just the prefix of a Synapse macaroon, but it's enough to make the + // sniffing work + assert!(is_likely_synapse_macaroon( + "MDAxYmxvY2F0aW9uIGxpYnJlcHVzaC5uZXQKMDAx" + )); + + // This is a valid macaroon (even though Synapse did not generate this one) + assert!(is_likely_synapse_macaroon( + "MDAxY2xvY2F0aW9uIGh0dHA6Ly9teWJhbmsvCjAwMjZpZGVudGlmaWVyIHdlIHVzZWQgb3VyIHNlY3JldCBrZXkKMDAyZnNpZ25hdHVyZSDj2eApCFJsTAA5rhURQRXZf91ovyujebNCqvD2F9BVLwo" + )); + + // None of these are macaroons + assert!(!is_likely_synapse_macaroon( + "eyJARTOhearotnaeisahtoarsnhiasra.arsohenaor.oarnsteao" + )); + assert!(!is_likely_synapse_macaroon("....")); + assert!(!is_likely_synapse_macaroon("aaa")); + } + + #[test] + fn test_generate_and_check() { + const COUNT: usize = 500; // Generate 500 of each token type + + #[allow(clippy::disallowed_methods)] + let mut rng = thread_rng(); + + for t in [ + TokenType::CompatAccessToken, + TokenType::CompatRefreshToken, + TokenType::AccessToken, + TokenType::RefreshToken, + ] { + // Generate many tokens + let tokens: HashSet = (0..COUNT).map(|_| t.generate(&mut rng)).collect(); + + // Check that they are all different + assert_eq!(tokens.len(), COUNT, "All tokens are unique"); + + // Check that they are all valid and detected as the right token type + for token in tokens { + assert_eq!(TokenType::check(&token).unwrap(), t); + } + } + } +} diff --git a/matrix-authentication-service/crates/data-model/src/upstream_oauth2/link.rs 
b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/link.rs new file mode 100644 index 00000000..421793ce --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/link.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::{DateTime, Utc}; +use serde::Serialize; +use ulid::Ulid; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UpstreamOAuthLink { + pub id: Ulid, + pub provider_id: Ulid, + pub user_id: Option, + pub subject: String, + pub human_account_name: Option, + pub created_at: DateTime, +} diff --git a/matrix-authentication-service/crates/data-model/src/upstream_oauth2/mod.rs b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/mod.rs new file mode 100644 index 00000000..56371656 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/mod.rs @@ -0,0 +1,26 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod link; +mod provider; +mod session; + +pub use self::{ + link::UpstreamOAuthLink, + provider::{ + ClaimsImports as UpstreamOAuthProviderClaimsImports, + DiscoveryMode as UpstreamOAuthProviderDiscoveryMode, + ImportAction as UpstreamOAuthProviderImportAction, + ImportPreference as UpstreamOAuthProviderImportPreference, + LocalpartPreference as UpstreamOAuthProviderLocalpartPreference, + OnBackchannelLogout as UpstreamOAuthProviderOnBackchannelLogout, + OnConflict as UpstreamOAuthProviderOnConflict, PkceMode as UpstreamOAuthProviderPkceMode, + ResponseMode as UpstreamOAuthProviderResponseMode, + SubjectPreference as UpstreamOAuthProviderSubjectPreference, + TokenAuthMethod as UpstreamOAuthProviderTokenAuthMethod, UpstreamOAuthProvider, + }, + session::{UpstreamOAuthAuthorizationSession, UpstreamOAuthAuthorizationSessionState}, +}; diff --git a/matrix-authentication-service/crates/data-model/src/upstream_oauth2/provider.rs b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/provider.rs new file mode 100644 index 00000000..94f6c2e5 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/provider.rs @@ -0,0 +1,435 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use chrono::{DateTime, Utc}; +use mas_iana::jose::JsonWebSignatureAlg; +use oauth2_types::scope::Scope; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; +use url::Url; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum DiscoveryMode { + /// Use OIDC discovery to fetch and verify the provider metadata + #[default] + Oidc, + + /// Use OIDC discovery to fetch the provider metadata, but don't verify it + Insecure, + + /// Don't fetch the provider metadata + Disabled, +} + +impl DiscoveryMode { + /// Returns `true` if discovery is disabled + #[must_use] + pub fn is_disabled(&self) -> bool { + matches!(self, DiscoveryMode::Disabled) + } +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid discovery mode {0:?}")] +pub struct InvalidDiscoveryModeError(String); + +impl std::str::FromStr for DiscoveryMode { + type Err = InvalidDiscoveryModeError; + + fn from_str(s: &str) -> Result { + match s { + "oidc" => Ok(Self::Oidc), + "insecure" => Ok(Self::Insecure), + "disabled" => Ok(Self::Disabled), + s => Err(InvalidDiscoveryModeError(s.to_owned())), + } + } +} + +impl DiscoveryMode { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::Oidc => "oidc", + Self::Insecure => "insecure", + Self::Disabled => "disabled", + } + } +} + +impl std::fmt::Display for DiscoveryMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum PkceMode { + /// Use PKCE if the provider supports it + #[default] + Auto, + + /// Always use PKCE with the S256 method + S256, + + /// Don't use PKCE + Disabled, +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid PKCE mode {0:?}")] +pub struct InvalidPkceModeError(String); + +impl std::str::FromStr for PkceMode { + type Err = InvalidPkceModeError; 
+ + fn from_str(s: &str) -> Result { + match s { + "auto" => Ok(Self::Auto), + "s256" => Ok(Self::S256), + "disabled" => Ok(Self::Disabled), + s => Err(InvalidPkceModeError(s.to_owned())), + } + } +} + +impl PkceMode { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::Auto => "auto", + Self::S256 => "s256", + Self::Disabled => "disabled", + } + } +} + +impl std::fmt::Display for PkceMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid response mode {0:?}")] +pub struct InvalidResponseModeError(String); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +pub enum ResponseMode { + #[default] + Query, + FormPost, +} + +impl From for oauth2_types::requests::ResponseMode { + fn from(value: ResponseMode) -> Self { + match value { + ResponseMode::Query => oauth2_types::requests::ResponseMode::Query, + ResponseMode::FormPost => oauth2_types::requests::ResponseMode::FormPost, + } + } +} + +impl ResponseMode { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::Query => "query", + Self::FormPost => "form_post", + } + } +} + +impl std::fmt::Display for ResponseMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +impl std::str::FromStr for ResponseMode { + type Err = InvalidResponseModeError; + + fn from_str(s: &str) -> Result { + match s { + "query" => Ok(ResponseMode::Query), + "form_post" => Ok(ResponseMode::FormPost), + s => Err(InvalidResponseModeError(s.to_owned())), + } + } +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum TokenAuthMethod { + None, + ClientSecretBasic, + ClientSecretPost, + ClientSecretJwt, + PrivateKeyJwt, + SignInWithApple, +} + +impl TokenAuthMethod { + #[must_use] + pub fn as_str(self) -> 
&'static str { + match self { + Self::None => "none", + Self::ClientSecretBasic => "client_secret_basic", + Self::ClientSecretPost => "client_secret_post", + Self::ClientSecretJwt => "client_secret_jwt", + Self::PrivateKeyJwt => "private_key_jwt", + Self::SignInWithApple => "sign_in_with_apple", + } + } +} + +impl std::fmt::Display for TokenAuthMethod { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +impl std::str::FromStr for TokenAuthMethod { + type Err = InvalidUpstreamOAuth2TokenAuthMethod; + + fn from_str(s: &str) -> Result { + match s { + "none" => Ok(Self::None), + "client_secret_post" => Ok(Self::ClientSecretPost), + "client_secret_basic" => Ok(Self::ClientSecretBasic), + "client_secret_jwt" => Ok(Self::ClientSecretJwt), + "private_key_jwt" => Ok(Self::PrivateKeyJwt), + "sign_in_with_apple" => Ok(Self::SignInWithApple), + s => Err(InvalidUpstreamOAuth2TokenAuthMethod(s.to_owned())), + } + } +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid upstream OAuth 2.0 token auth method: {0}")] +pub struct InvalidUpstreamOAuth2TokenAuthMethod(String); + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum OnBackchannelLogout { + DoNothing, + LogoutBrowserOnly, + LogoutAll, +} + +impl OnBackchannelLogout { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::DoNothing => "do_nothing", + Self::LogoutBrowserOnly => "logout_browser_only", + Self::LogoutAll => "logout_all", + } + } +} + +impl std::fmt::Display for OnBackchannelLogout { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +impl std::str::FromStr for OnBackchannelLogout { + type Err = InvalidUpstreamOAuth2OnBackchannelLogout; + + fn from_str(s: &str) -> Result { + match s { + "do_nothing" => Ok(Self::DoNothing), + "logout_browser_only" => Ok(Self::LogoutBrowserOnly), + "logout_all" => 
Ok(Self::LogoutAll), + s => Err(InvalidUpstreamOAuth2OnBackchannelLogout(s.to_owned())), + } + } +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid upstream OAuth 2.0 'on backchannel logout': {0}")] +pub struct InvalidUpstreamOAuth2OnBackchannelLogout(String); + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UpstreamOAuthProvider { + pub id: Ulid, + pub issuer: Option, + pub human_name: Option, + pub brand_name: Option, + pub discovery_mode: DiscoveryMode, + pub pkce_mode: PkceMode, + pub jwks_uri_override: Option, + pub authorization_endpoint_override: Option, + pub scope: Scope, + pub token_endpoint_override: Option, + pub userinfo_endpoint_override: Option, + pub fetch_userinfo: bool, + pub userinfo_signed_response_alg: Option, + pub client_id: String, + pub encrypted_client_secret: Option, + pub token_endpoint_signing_alg: Option, + pub token_endpoint_auth_method: TokenAuthMethod, + pub id_token_signed_response_alg: JsonWebSignatureAlg, + pub response_mode: Option, + pub created_at: DateTime, + pub disabled_at: Option>, + pub claims_imports: ClaimsImports, + pub additional_authorization_parameters: Vec<(String, String)>, + pub forward_login_hint: bool, + pub on_backchannel_logout: OnBackchannelLogout, +} + +impl PartialOrd for UpstreamOAuthProvider { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for UpstreamOAuthProvider { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.id.cmp(&other.id) + } +} + +impl UpstreamOAuthProvider { + /// Returns `true` if the provider is enabled + #[must_use] + pub const fn enabled(&self) -> bool { + self.disabled_at.is_none() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct ClaimsImports { + #[serde(default)] + pub subject: SubjectPreference, + + #[serde(default)] + pub skip_confirmation: bool, + + #[serde(default)] + pub localpart: LocalpartPreference, + + #[serde(default)] + pub displayname: 
ImportPreference, + + #[serde(default)] + pub email: ImportPreference, + + #[serde(default)] + pub account_name: SubjectPreference, +} + +// XXX: this should have another name +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct SubjectPreference { + #[serde(default)] + pub template: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct LocalpartPreference { + #[serde(default)] + pub action: ImportAction, + + #[serde(default)] + pub template: Option, + + #[serde(default)] + pub on_conflict: OnConflict, +} + +impl std::ops::Deref for LocalpartPreference { + type Target = ImportAction; + + fn deref(&self) -> &Self::Target { + &self.action + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct ImportPreference { + #[serde(default)] + pub action: ImportAction, + + #[serde(default)] + pub template: Option, +} + +impl std::ops::Deref for ImportPreference { + type Target = ImportAction; + + fn deref(&self) -> &Self::Target { + &self.action + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum ImportAction { + /// Ignore the claim + #[default] + Ignore, + + /// Suggest the claim value, but allow the user to change it + Suggest, + + /// Force the claim value, but don't fail if it is missing + Force, + + /// Force the claim value, and fail if it is missing + Require, +} + +impl ImportAction { + #[must_use] + pub fn is_forced_or_required(&self) -> bool { + matches!(self, Self::Force | Self::Require) + } + + #[must_use] + pub fn ignore(&self) -> bool { + matches!(self, Self::Ignore) + } + + #[must_use] + pub fn is_required(&self) -> bool { + matches!(self, Self::Require) + } + + #[must_use] + pub fn should_import(&self, user_preference: bool) -> bool { + match self { + Self::Ignore => false, + Self::Suggest => user_preference, + Self::Force | Self::Require => true, + } + } +} + 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum OnConflict { + /// Fails the upstream OAuth 2.0 login on conflict + #[default] + Fail, + + /// Adds the upstream OAuth 2.0 identity link, regardless of whether there + /// is an existing link or not + Add, + + /// Replace any existing upstream OAuth 2.0 identity link + Replace, + + /// Adds the upstream OAuth 2.0 identity link *only* if there is no existing + /// link for this provider on the matching user + Set, +} diff --git a/matrix-authentication-service/crates/data-model/src/upstream_oauth2/session.rs b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/session.rs new file mode 100644 index 00000000..e7dad713 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/upstream_oauth2/session.rs @@ -0,0 +1,338 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use chrono::{DateTime, Utc}; +use serde::Serialize; +use ulid::Ulid; + +use super::UpstreamOAuthLink; +use crate::InvalidTransitionError; + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize)] +pub enum UpstreamOAuthAuthorizationSessionState { + #[default] + Pending, + Completed { + completed_at: DateTime, + link_id: Ulid, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + }, + Consumed { + completed_at: DateTime, + consumed_at: DateTime, + link_id: Ulid, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + }, + Unlinked { + completed_at: DateTime, + consumed_at: Option>, + unlinked_at: DateTime, + id_token: Option, + id_token_claims: Option, + }, +} + +impl UpstreamOAuthAuthorizationSessionState { + /// Mark the upstream OAuth 2.0 authorization session as completed. + /// + /// # Errors + /// + /// Returns an error if the upstream OAuth 2.0 authorization session state + /// is not [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + pub fn complete( + self, + completed_at: DateTime, + link: &UpstreamOAuthLink, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + ) -> Result { + match self { + Self::Pending => Ok(Self::Completed { + completed_at, + link_id: link.id, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + }), + Self::Completed { .. } | Self::Consumed { .. } | Self::Unlinked { .. } => { + Err(InvalidTransitionError) + } + } + } + + /// Mark the upstream OAuth 2.0 authorization session as consumed. + /// + /// # Errors + /// + /// Returns an error if the upstream OAuth 2.0 authorization session state + /// is not [`Completed`]. 
+ /// + /// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed + pub fn consume(self, consumed_at: DateTime) -> Result { + match self { + Self::Completed { + completed_at, + link_id, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + } => Ok(Self::Consumed { + completed_at, + link_id, + consumed_at, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + }), + Self::Pending | Self::Consumed { .. } | Self::Unlinked { .. } => { + Err(InvalidTransitionError) + } + } + } + + /// Get the link ID for the upstream OAuth 2.0 authorization session. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn link_id(&self) -> Option { + match self { + Self::Pending | Self::Unlinked { .. } => None, + Self::Completed { link_id, .. } | Self::Consumed { link_id, .. } => Some(*link_id), + } + } + + /// Get the time at which the upstream OAuth 2.0 authorization session was + /// completed. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn completed_at(&self) -> Option> { + match self { + Self::Pending => None, + Self::Completed { completed_at, .. } + | Self::Consumed { completed_at, .. } + | Self::Unlinked { completed_at, .. } => Some(*completed_at), + } + } + + /// Get the ID token for the upstream OAuth 2.0 authorization session. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn id_token(&self) -> Option<&str> { + match self { + Self::Pending => None, + Self::Completed { id_token, .. } + | Self::Consumed { id_token, .. } + | Self::Unlinked { id_token, .. 
} => id_token.as_deref(), + } + } + + /// Get the ID token claims for the upstream OAuth 2.0 authorization + /// session. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// not [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn id_token_claims(&self) -> Option<&serde_json::Value> { + match self { + Self::Pending => None, + Self::Completed { + id_token_claims, .. + } + | Self::Consumed { + id_token_claims, .. + } + | Self::Unlinked { + id_token_claims, .. + } => id_token_claims.as_ref(), + } + } + + /// Get the extra query parameters that were sent to the upstream provider. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// not [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn extra_callback_parameters(&self) -> Option<&serde_json::Value> { + match self { + Self::Pending | Self::Unlinked { .. } => None, + Self::Completed { + extra_callback_parameters, + .. + } + | Self::Consumed { + extra_callback_parameters, + .. + } => extra_callback_parameters.as_ref(), + } + } + + #[must_use] + pub fn userinfo(&self) -> Option<&serde_json::Value> { + match self { + Self::Pending | Self::Unlinked { .. } => None, + Self::Completed { userinfo, .. } | Self::Consumed { userinfo, .. } => userinfo.as_ref(), + } + } + + /// Get the time at which the upstream OAuth 2.0 authorization session was + /// consumed. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// not [`Consumed`]. + /// + /// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed + #[must_use] + pub fn consumed_at(&self) -> Option> { + match self { + Self::Pending | Self::Completed { .. } => None, + Self::Consumed { consumed_at, .. } => Some(*consumed_at), + Self::Unlinked { consumed_at, .. 
} => *consumed_at, + } + } + + /// Get the time at which the upstream OAuth 2.0 authorization session was + /// unlinked. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// not [`Unlinked`]. + /// + /// [`Unlinked`]: UpstreamOAuthAuthorizationSessionState::Unlinked + #[must_use] + pub fn unlinked_at(&self) -> Option> { + match self { + Self::Pending | Self::Completed { .. } | Self::Consumed { .. } => None, + Self::Unlinked { unlinked_at, .. } => Some(*unlinked_at), + } + } + + /// Returns `true` if the upstream OAuth 2.0 authorization session state is + /// [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn is_pending(&self) -> bool { + matches!(self, Self::Pending) + } + + /// Returns `true` if the upstream OAuth 2.0 authorization session state is + /// [`Completed`]. + /// + /// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed + #[must_use] + pub fn is_completed(&self) -> bool { + matches!(self, Self::Completed { .. }) + } + + /// Returns `true` if the upstream OAuth 2.0 authorization session state is + /// [`Consumed`]. + /// + /// [`Consumed`]: UpstreamOAuthAuthorizationSessionState::Consumed + #[must_use] + pub fn is_consumed(&self) -> bool { + matches!(self, Self::Consumed { .. }) + } + + /// Returns `true` if the upstream OAuth 2.0 authorization session state is + /// [`Unlinked`]. + /// + /// [`Unlinked`]: UpstreamOAuthAuthorizationSessionState::Unlinked + #[must_use] + pub fn is_unlinked(&self) -> bool { + matches!(self, Self::Unlinked { .. 
}) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UpstreamOAuthAuthorizationSession { + pub id: Ulid, + pub state: UpstreamOAuthAuthorizationSessionState, + pub provider_id: Ulid, + pub state_str: String, + pub code_challenge_verifier: Option, + pub nonce: Option, + pub created_at: DateTime, +} + +impl std::ops::Deref for UpstreamOAuthAuthorizationSession { + type Target = UpstreamOAuthAuthorizationSessionState; + + fn deref(&self) -> &Self::Target { + &self.state + } +} + +impl UpstreamOAuthAuthorizationSession { + /// Mark the upstream OAuth 2.0 authorization session as completed. Returns + /// the updated session. + /// + /// # Errors + /// + /// Returns an error if the upstream OAuth 2.0 authorization session state + /// is not [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + pub fn complete( + mut self, + completed_at: DateTime, + link: &UpstreamOAuthLink, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + ) -> Result { + self.state = self.state.complete( + completed_at, + link, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + )?; + Ok(self) + } + + /// Mark the upstream OAuth 2.0 authorization session as consumed. Returns + /// the updated session. + /// + /// # Errors + /// + /// Returns an error if the upstream OAuth 2.0 authorization session state + /// is not [`Completed`]. + /// + /// [`Completed`]: UpstreamOAuthAuthorizationSessionState::Completed + pub fn consume(mut self, consumed_at: DateTime) -> Result { + self.state = self.state.consume(consumed_at)?; + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/data-model/src/user_agent.rs b/matrix-authentication-service/crates/data-model/src/user_agent.rs new file mode 100644 index 00000000..d0e93058 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/user_agent.rs @@ -0,0 +1,225 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::LazyLock; + +use serde::Serialize; +use woothee::{parser::Parser, woothee::VALUE_UNKNOWN}; + +static CUSTOM_USER_AGENT_REGEX: LazyLock = LazyLock::new(|| { + regex::Regex::new(r"^(?P[^/]+)/(?P[^ ]+) \((?P.+)\)$").unwrap() +}); + +static ELECTRON_USER_AGENT_REGEX: LazyLock = + LazyLock::new(|| regex::Regex::new(r"(?m)\w+/[\w.]+").unwrap()); + +#[derive(Debug, Serialize, Clone, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum DeviceType { + Pc, + Mobile, + Tablet, + Unknown, +} + +#[derive(Debug, Serialize, Clone, PartialEq, Eq)] +pub struct UserAgent { + pub name: Option, + pub version: Option, + pub os: Option, + pub os_version: Option, + pub model: Option, + pub device_type: DeviceType, + pub raw: String, +} + +impl std::ops::Deref for UserAgent { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.raw + } +} + +impl UserAgent { + fn parse_custom(user_agent: &str) -> Option<(&str, &str, &str, &str, Option<&str>)> { + let captures = CUSTOM_USER_AGENT_REGEX.captures(user_agent)?; + let name = captures.name("name")?.as_str(); + let version = captures.name("version")?.as_str(); + let segments: Vec<&str> = captures + .name("segments")? + .as_str() + .split(';') + .map(str::trim) + .collect(); + + match segments[..] { + ["Linux", "U", os, model, ..] | [model, os, ..] 
=> { + // Most android model have a `/[build version]` suffix we don't care about + let model = model.split_once('/').map_or(model, |(model, _)| model); + // Some android version also have `Build/[build version]` suffix we don't care + // about + let model = model.strip_suffix("Build").unwrap_or(model); + // And let's trim any leftovers + let model = model.trim(); + + let (os, os_version) = if let Some((os, version)) = os.split_once(' ') { + (os, Some(version)) + } else { + (os, None) + }; + + Some((name, version, model, os, os_version)) + } + _ => None, + } + } + + fn parse_electron(user_agent: &str) -> Option<(&str, &str)> { + let omit_keys = ["Mozilla", "AppleWebKit", "Chrome", "Electron", "Safari"]; + return ELECTRON_USER_AGENT_REGEX + .find_iter(user_agent) + .map(|caps| caps.as_str().split_once('/').unwrap()) + .find(|pair| !omit_keys.contains(&pair.0)); + } + + #[must_use] + pub fn parse(user_agent: String) -> Self { + if !user_agent.contains("Mozilla/") + && let Some((name, version, model, os, os_version)) = + UserAgent::parse_custom(&user_agent) + { + let mut device_type = DeviceType::Unknown; + + // Handle mobile simple mobile devices + if os == "Android" || os == "iOS" { + device_type = DeviceType::Mobile; + } + + // Handle iPads + if model.contains("iPad") { + device_type = DeviceType::Tablet; + } + + return Self { + name: Some(name.to_owned()), + version: Some(version.to_owned()), + os: Some(os.to_owned()), + os_version: os_version.map(std::borrow::ToOwned::to_owned), + model: Some(model.to_owned()), + device_type, + raw: user_agent, + }; + } + + let mut model = None; + let Some(mut result) = Parser::new().parse(&user_agent) else { + return Self { + raw: user_agent, + name: None, + version: None, + os: None, + os_version: None, + model: None, + device_type: DeviceType::Unknown, + }; + }; + + let mut device_type = match result.category { + "pc" => DeviceType::Pc, + "smartphone" | "mobilephone" => DeviceType::Mobile, + _ => DeviceType::Unknown, + }; + + 
// Special handling for Chrome user-agent reduction cases + // https://www.chromium.org/updates/ua-reduction/ + match (result.os, &*result.os_version) { + // Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/533.88 (KHTML, like Gecko) + // Chrome/109.1.2342.76 Safari/533.88 + ("Windows 10", "NT 10.0") if user_agent.contains("Windows NT 10.0; Win64; x64") => { + result.os = "Windows"; + result.os_version = VALUE_UNKNOWN.into(); + } + + // Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) + // Chrome/100.0.0.0 Safari/537.36 + ("Linux", _) if user_agent.contains("X11; Linux x86_64") => { + result.os = "Linux"; + result.os_version = VALUE_UNKNOWN.into(); + } + + // Mozilla/5.0 (X11; CrOS x86_64 14541.0.0) AppleWebKit/537.36 (KHTML, like Gecko) + // Chrome/107.0.0.0 Safari/537.36 + ("ChromeOS", _) if user_agent.contains("X11; CrOS x86_64 14541.0.0") => { + result.os = "Chrome OS"; + result.os_version = VALUE_UNKNOWN.into(); + } + + // Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) + // Chrome/100.0.0.0 Mobile Safari/537.36 + ("Android", "10") if user_agent.contains("Linux; Android 10; K") => { + result.os = "Android"; + result.os_version = VALUE_UNKNOWN.into(); + } + + // Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like + // Gecko) Chrome/100.0.4896.133 Safari/537.36 + // Safari also freezes the OS version + // Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like + // Gecko) Version/17.3.1 Safari/605.1.15 + ("Mac OSX", "10.15.7") if user_agent.contains("Macintosh; Intel Mac OS X 10_15_7") => { + result.os = "macOS"; + result.os_version = VALUE_UNKNOWN.into(); + } + + // Woothee identifies iPhone and iPod in the OS, but we want to map them to iOS and use + // them as model + ("iPhone" | "iPod", _) => { + model = Some(result.os.to_owned()); + result.os = "iOS"; + } + + ("iPad", _) => { + model = Some(result.os.to_owned()); + device_type = DeviceType::Tablet; + 
result.os = "iPadOS"; + } + + // Also map `Mac OSX` to `macOS` + ("Mac OSX", _) => { + result.os = "macOS"; + } + + _ => {} + } + + // For some reason, the version on Windows is on the OS field + // This transforms `Windows 10` into `Windows` and `10` + if let Some(version) = result.os.strip_prefix("Windows ") { + result.os = "Windows"; + result.os_version = version.into(); + } + + // Special handling for Electron applications e.g. Element Desktop + if user_agent.contains("Electron/") + && let Some(app) = UserAgent::parse_electron(&user_agent) + { + result.name = app.0; + result.version = app.1; + } + + Self { + name: (result.name != VALUE_UNKNOWN).then(|| result.name.to_owned()), + version: (result.version != VALUE_UNKNOWN).then(|| result.version.to_owned()), + os: (result.os != VALUE_UNKNOWN).then(|| result.os.to_owned()), + os_version: (result.os_version != VALUE_UNKNOWN) + .then(|| result.os_version.into_owned()), + device_type, + model, + raw: user_agent, + } + } +} diff --git a/matrix-authentication-service/crates/data-model/src/users.rs b/matrix-authentication-service/crates/data-model/src/users.rs new file mode 100644 index 00000000..78c483e1 --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/users.rs @@ -0,0 +1,287 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use rand::Rng; +use serde::Serialize; +use ulid::Ulid; +use url::Url; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct MatrixUser { + pub mxid: String, + pub display_name: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct User { + pub id: Ulid, + pub username: String, + pub sub: String, + pub created_at: DateTime, + pub locked_at: Option>, + pub deactivated_at: Option>, + pub can_request_admin: bool, + pub is_guest: bool, +} + +impl User { + /// Returns `true` unless the user is locked or deactivated. + #[must_use] + pub fn is_valid(&self) -> bool { + self.locked_at.is_none() && self.deactivated_at.is_none() + } + + /// Returns `true` if the user is a valid actor, for example + /// of a personal session. + /// + /// Currently: this is `true` unless the user is deactivated. + /// + /// This is a weaker form of validity: `is_valid` always implies + /// `is_valid_actor`, but some users (currently: locked users) + /// can be valid actors for personal sessions but aren't valid + /// except through administrative access. 
+ #[must_use] + pub fn is_valid_actor(&self) -> bool { + self.deactivated_at.is_none() + } +} + +impl User { + #[doc(hidden)] + #[must_use] + pub fn samples(now: chrono::DateTime, rng: &mut impl Rng) -> Vec { + vec![User { + id: Ulid::from_datetime_with_source(now.into(), rng), + username: "john".to_owned(), + sub: "123-456".to_owned(), + created_at: now, + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }] + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct Password { + pub id: Ulid, + pub hashed_password: String, + pub version: u16, + pub upgraded_from_id: Option, + pub created_at: DateTime, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct Authentication { + pub id: Ulid, + pub created_at: DateTime, + pub authentication_method: AuthenticationMethod, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub enum AuthenticationMethod { + Password { user_password_id: Ulid }, + UpstreamOAuth2 { upstream_oauth2_session_id: Ulid }, + Unknown, +} + +/// A session to recover a user if they have lost their credentials +/// +/// For each session intiated, there may be multiple [`UserRecoveryTicket`]s +/// sent to the user, either because multiple [`User`] have the same email +/// address, or because the user asked to send the recovery email again. +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserRecoverySession { + pub id: Ulid, + pub email: String, + pub user_agent: String, + pub ip_address: Option, + pub locale: String, + pub created_at: DateTime, + pub consumed_at: Option>, +} + +/// A single recovery ticket for a user recovery session +/// +/// Whenever a new recovery session is initiated, a new ticket is created for +/// each email address matching in the database. That ticket is sent by email, +/// as a link that the user can click to recover their account. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserRecoveryTicket { + pub id: Ulid, + pub user_recovery_session_id: Ulid, + pub user_email_id: Ulid, + pub ticket: String, + pub created_at: DateTime, + pub expires_at: DateTime, +} + +impl UserRecoveryTicket { + #[must_use] + pub fn active(&self, now: DateTime) -> bool { + now < self.expires_at + } +} + +/// A user email authentication session +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserEmailAuthentication { + pub id: Ulid, + pub user_session_id: Option, + pub user_registration_id: Option, + pub email: String, + pub created_at: DateTime, + pub completed_at: Option>, +} + +/// A user email authentication code +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserEmailAuthenticationCode { + pub id: Ulid, + pub user_email_authentication_id: Ulid, + pub code: String, + pub created_at: DateTime, + pub expires_at: DateTime, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct BrowserSession { + pub id: Ulid, + pub user: User, + pub created_at: DateTime, + pub finished_at: Option>, + pub user_agent: Option, + pub last_active_at: Option>, + pub last_active_ip: Option, +} + +impl BrowserSession { + #[must_use] + pub fn active(&self) -> bool { + self.finished_at.is_none() && self.user.is_valid() + } +} + +impl BrowserSession { + #[must_use] + pub fn samples(now: chrono::DateTime, rng: &mut impl Rng) -> Vec { + User::samples(now, rng) + .into_iter() + .map(|user| BrowserSession { + id: Ulid::from_datetime_with_source(now.into(), rng), + user, + created_at: now, + finished_at: None, + user_agent: Some( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.0.0 Safari/537.36".to_owned() + ), + last_active_at: Some(now), + last_active_ip: None, + }) + .collect() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserEmail { + pub id: Ulid, + pub user_id: Ulid, + pub email: String, + pub created_at: 
DateTime, +} + +impl UserEmail { + #[must_use] + pub fn samples(now: chrono::DateTime, rng: &mut impl Rng) -> Vec { + vec![ + Self { + id: Ulid::from_datetime_with_source(now.into(), rng), + user_id: Ulid::from_datetime_with_source(now.into(), rng), + email: "alice@example.com".to_owned(), + created_at: now, + }, + Self { + id: Ulid::from_datetime_with_source(now.into(), rng), + user_id: Ulid::from_datetime_with_source(now.into(), rng), + email: "bob@example.com".to_owned(), + created_at: now, + }, + ] + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserRegistrationPassword { + pub hashed_password: String, + pub version: u16, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserRegistrationToken { + pub id: Ulid, + pub token: String, + pub usage_limit: Option, + pub times_used: u32, + pub created_at: DateTime, + pub last_used_at: Option>, + pub expires_at: Option>, + pub revoked_at: Option>, +} + +impl UserRegistrationToken { + /// Returns `true` if the token is still valid and can be used + #[must_use] + pub fn is_valid(&self, now: DateTime) -> bool { + // Check if revoked + if self.revoked_at.is_some() { + return false; + } + + // Check if expired + if let Some(expires_at) = self.expires_at + && now >= expires_at + { + return false; + } + + // Check if usage limit exceeded + if let Some(usage_limit) = self.usage_limit + && self.times_used >= usage_limit + { + return false; + } + + true + } + + /// Returns `true` if the token can still be used (not expired and under + /// usage limit) + #[must_use] + pub fn can_be_used(&self, now: DateTime) -> bool { + self.is_valid(now) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize)] +pub struct UserRegistration { + pub id: Ulid, + pub username: String, + pub display_name: Option, + pub terms_url: Option, + pub email_authentication_id: Option, + pub user_registration_token_id: Option, + pub password: Option, + pub upstream_oauth_authorization_session_id: Option, + pub 
post_auth_action: Option, + pub ip_address: Option, + pub user_agent: Option, + pub created_at: DateTime, + pub completed_at: Option>, +} diff --git a/matrix-authentication-service/crates/data-model/src/utils.rs b/matrix-authentication-service/crates/data-model/src/utils.rs new file mode 100644 index 00000000..bd3b15de --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/utils.rs @@ -0,0 +1,13 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use rand_chacha::rand_core::CryptoRngCore; + +use crate::clock::Clock; + +/// A boxed [`Clock`] +pub type BoxClock = Box; +/// A boxed random number generator +pub type BoxRng = Box; diff --git a/matrix-authentication-service/crates/data-model/src/version.rs b/matrix-authentication-service/crates/data-model/src/version.rs new file mode 100644 index 00000000..86d890fc --- /dev/null +++ b/matrix-authentication-service/crates/data-model/src/version.rs @@ -0,0 +1,8 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +/// A structure which holds information about the running version of the app +#[derive(Debug, Clone, Copy)] +pub struct AppVersion(pub &'static str); diff --git a/matrix-authentication-service/crates/email/Cargo.toml b/matrix-authentication-service/crates/email/Cargo.toml new file mode 100644 index 00000000..eba4ea6f --- /dev/null +++ b/matrix-authentication-service/crates/email/Cargo.toml @@ -0,0 +1,25 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-email" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +async-trait.workspace = true +lettre.workspace = true +thiserror.workspace = true +tracing.workspace = true + +mas-templates.workspace = true diff --git a/matrix-authentication-service/crates/email/src/lib.rs b/matrix-authentication-service/crates/email/src/lib.rs new file mode 100644 index 00000000..ee731f74 --- /dev/null +++ b/matrix-authentication-service/crates/email/src/lib.rs @@ -0,0 +1,22 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Helps sending emails to users, with different email backends + +#![deny(missing_docs)] + +mod mailer; +mod transport; + +pub use lettre::{ + Address, message::Mailbox, transport::smtp::authentication::Credentials as SmtpCredentials, +}; +pub use mas_templates::EmailVerificationContext; + +pub use self::{ + mailer::Mailer, + transport::{SmtpMode, Transport as MailTransport}, +}; diff --git a/matrix-authentication-service/crates/email/src/mailer.rs b/matrix-authentication-service/crates/email/src/mailer.rs new file mode 100644 index 00000000..0b5f96d9 --- /dev/null +++ b/matrix-authentication-service/crates/email/src/mailer.rs @@ -0,0 +1,162 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Send emails to users + +use lettre::{ + AsyncTransport, Message, + message::{Mailbox, MessageBuilder, MultiPart}, +}; +use mas_templates::{EmailRecoveryContext, EmailVerificationContext, Templates, WithLanguage}; +use thiserror::Error; + +use crate::MailTransport; + +/// Helps sending mails to users +#[derive(Clone)] +pub struct Mailer { + templates: Templates, + transport: MailTransport, + from: Mailbox, + reply_to: Mailbox, +} + +#[derive(Debug, Error)] +#[error(transparent)] +pub enum Error { + Transport(#[from] crate::transport::Error), + Templates(#[from] mas_templates::TemplateError), + Content(#[from] lettre::error::Error), +} + +impl Mailer { + /// Constructs a new [`Mailer`] + #[must_use] + pub fn new( + templates: Templates, + transport: MailTransport, + from: Mailbox, + reply_to: Mailbox, + ) -> Self { + Self { + templates, + transport, + from, + reply_to, + } + } + + fn base_message(&self) -> MessageBuilder { + Message::builder() + .from(self.from.clone()) + .reply_to(self.reply_to.clone()) + // By passing `None`, lettre generates a random message ID + // with a random UUID and the hostname for us + .message_id(None) + } + + fn prepare_verification_email( + &self, + to: Mailbox, + context: &WithLanguage, + ) -> Result { + let plain = self.templates.render_email_verification_txt(context)?; + + let html = self.templates.render_email_verification_html(context)?; + + let multipart = MultiPart::alternative_plain_html(plain, html); + + let subject = self.templates.render_email_verification_subject(context)?; + + let message = self + .base_message() + .subject(subject.trim()) + .to(to) + .multipart(multipart)?; + + Ok(message) + } + + fn prepare_recovery_email( + &self, + to: Mailbox, + context: &WithLanguage, + ) -> Result { + let plain = self.templates.render_email_recovery_txt(context)?; + + let html = self.templates.render_email_recovery_html(context)?; + + let multipart = MultiPart::alternative_plain_html(plain, html); + + let subject = 
self.templates.render_email_recovery_subject(context)?; + + let message = self + .base_message() + .subject(subject.trim()) + .to(to) + .multipart(multipart)?; + + Ok(message) + } + + /// Send the verification email to a user + /// + /// # Errors + /// + /// Will return `Err` if the email failed rendering or failed sending + #[tracing::instrument( + name = "email.verification.send", + skip_all, + fields( + email.to = %to, + email.language = %context.language(), + ), + )] + pub async fn send_verification_email( + &self, + to: Mailbox, + context: &WithLanguage, + ) -> Result<(), Error> { + let message = self.prepare_verification_email(to, context)?; + self.transport.send(message).await?; + Ok(()) + } + + /// Send the recovery email to a user + /// + /// # Errors + /// + /// Will return `Err` if the email failed rendering or failed sending + #[tracing::instrument( + name = "email.recovery.send", + skip_all, + fields( + email.to = %to, + email.language = %context.language(), + user.id = %context.user().id, + user_recovery_session.id = %context.session().id, + ), + )] + pub async fn send_recovery_email( + &self, + to: Mailbox, + context: &WithLanguage, + ) -> Result<(), Error> { + let message = self.prepare_recovery_email(to, context)?; + self.transport.send(message).await?; + Ok(()) + } + + /// Test the connetion to the mail server + /// + /// # Errors + /// + /// Returns an error if the connection failed + #[tracing::instrument(name = "email.test_connection", skip_all)] + pub async fn test_connection(&self) -> Result<(), crate::transport::Error> { + self.transport.test_connection().await + } +} diff --git a/matrix-authentication-service/crates/email/src/transport.rs b/matrix-authentication-service/crates/email/src/transport.rs new file mode 100644 index 00000000..004844ab --- /dev/null +++ b/matrix-authentication-service/crates/email/src/transport.rs @@ -0,0 +1,147 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Email transport backends + +use std::{ffi::OsString, num::NonZeroU16, sync::Arc}; + +use async_trait::async_trait; +use lettre::{ + AsyncTransport, Tokio1Executor, + address::Envelope, + transport::{ + sendmail::AsyncSendmailTransport, + smtp::{AsyncSmtpTransport, authentication::Credentials}, + }, +}; +use thiserror::Error; + +/// Encryption mode to use +#[derive(Debug, Clone, Copy)] +pub enum SmtpMode { + /// Plain text + Plain, + /// `StartTLS` (starts as plain text then upgrade to TLS) + StartTls, + /// TLS + Tls, +} + +/// A wrapper around many [`AsyncTransport`]s +#[derive(Default, Clone)] +pub struct Transport { + inner: Arc, +} + +#[derive(Default)] +enum TransportInner { + #[default] + Blackhole, + Smtp(AsyncSmtpTransport), + Sendmail(AsyncSendmailTransport), +} + +impl Transport { + fn new(inner: TransportInner) -> Self { + let inner = Arc::new(inner); + Self { inner } + } + + /// Construct a blackhole transport + #[must_use] + pub fn blackhole() -> Self { + Self::new(TransportInner::Blackhole) + } + + /// Construct a SMTP transport + /// + /// # Errors + /// + /// Returns an error if the underlying SMTP transport could not be built + pub fn smtp( + mode: SmtpMode, + hostname: &str, + port: Option, + credentials: Option, + ) -> Result { + let mut t = match mode { + SmtpMode::Plain => AsyncSmtpTransport::::builder_dangerous(hostname), + SmtpMode::StartTls => AsyncSmtpTransport::::starttls_relay(hostname)?, + SmtpMode::Tls => AsyncSmtpTransport::::relay(hostname)?, + }; + + if let Some(credentials) = credentials { + t = t.credentials(credentials); + } + + if let Some(port) = port { + t = t.port(port.into()); + } + + Ok(Self::new(TransportInner::Smtp(t.build()))) + } + + /// Construct a Sendmail transport + #[must_use] + pub fn sendmail(command: Option>) -> Self { + let transport = if let Some(command) = 
command { + AsyncSendmailTransport::new_with_command(command) + } else { + AsyncSendmailTransport::new() + }; + Self::new(TransportInner::Sendmail(transport)) + } +} + +impl Transport { + /// Test the connection to the underlying transport. Only works with the + /// SMTP backend for now + /// + /// # Errors + /// + /// Will return `Err` if the connection test failed + pub async fn test_connection(&self) -> Result<(), Error> { + match self.inner.as_ref() { + TransportInner::Smtp(t) => { + t.test_connection().await?; + } + TransportInner::Blackhole | TransportInner::Sendmail(_) => {} + } + + Ok(()) + } +} + +#[derive(Debug, Error)] +#[error(transparent)] +pub enum Error { + Smtp(#[from] lettre::transport::smtp::Error), + Sendmail(#[from] lettre::transport::sendmail::Error), +} + +#[async_trait] +impl AsyncTransport for Transport { + type Ok = (); + type Error = Error; + + async fn send_raw(&self, envelope: &Envelope, email: &[u8]) -> Result { + match self.inner.as_ref() { + TransportInner::Blackhole => { + tracing::warn!( + "An email was supposed to be sent but no email backend is configured" + ); + } + TransportInner::Smtp(t) => { + t.send_raw(envelope, email).await?; + } + TransportInner::Sendmail(t) => { + t.send_raw(envelope, email).await?; + } + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/handlers/Cargo.toml b/matrix-authentication-service/crates/handlers/Cargo.toml new file mode 100644 index 00000000..57f39185 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/Cargo.toml @@ -0,0 +1,97 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-handlers" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +aide.workspace = true +anyhow.workspace = true +argon2.workspace = true +async-graphql.workspace = true +async-trait.workspace = true +axum-extra.workspace = true +axum-macros.workspace = true +axum.workspace = true +base64ct.workspace = true +bcrypt.workspace = true +camino.workspace = true +chrono.workspace = true +elliptic-curve.workspace = true +futures-util.workspace = true +governor.workspace = true +headers.workspace = true +hex.workspace = true +hyper.workspace = true +icu_normalizer.workspace = true +indexmap.workspace = true +lettre.workspace = true +mime.workspace = true +minijinja-contrib.workspace = true +minijinja.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +pbkdf2.workspace = true +pkcs8.workspace = true +psl.workspace = true +rand_chacha.workspace = true +rand.workspace = true +reqwest.workspace = true +rustls.workspace = true +schemars.workspace = true +sentry.workspace = true +serde_json.workspace = true +serde_urlencoded.workspace = true +serde_with.workspace = true +serde.workspace = true +sha2.workspace = true +sqlx.workspace = true +thiserror.workspace = true +tokio-util.workspace = true +tokio.workspace = true +tower-http.workspace = true +tower.workspace = true +tracing.workspace = true +ulid.workspace = true +url.workspace = true +zeroize.workspace = true + +mas-axum-utils.workspace = true +mas-config.workspace = true +mas-context.workspace = true +mas-data-model.workspace = true +mas-email.workspace = true +mas-http.workspace = true +mas-i18n.workspace = true +mas-iana.workspace = true +mas-jose.workspace = true +mas-keystore.workspace = true +mas-matrix.workspace = true +mas-oidc-client.workspace = true 
+mas-policy.workspace = true +mas-router.workspace = true +mas-storage.workspace = true +mas-storage-pg.workspace = true +mas-tasks.workspace = true +mas-templates.workspace = true +oauth2-types.workspace = true +zxcvbn.workspace = true + +[dev-dependencies] +insta.workspace = true +tracing-subscriber.workspace = true +cookie_store.workspace = true +sqlx.workspace = true +wiremock.workspace = true diff --git a/matrix-authentication-service/crates/handlers/src/activity_tracker/bound.rs b/matrix-authentication-service/crates/handlers/src/activity_tracker/bound.rs new file mode 100644 index 00000000..8f7acbdd --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/activity_tracker/bound.rs @@ -0,0 +1,62 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use mas_data_model::{ + BrowserSession, Clock, CompatSession, Session, personal::session::PersonalSession, +}; + +use crate::activity_tracker::ActivityTracker; + +/// An activity tracker with an IP address bound to it. +#[derive(Clone)] +pub struct Bound { + tracker: ActivityTracker, + ip: Option, +} + +impl Bound { + /// Create a new bound activity tracker. + #[must_use] + pub fn new(tracker: ActivityTracker, ip: Option) -> Self { + Self { tracker, ip } + } + + /// Get the IP address bound to this activity tracker. + #[must_use] + pub fn ip(&self) -> Option { + self.ip + } + + /// Record activity in an OAuth 2.0 session. + pub async fn record_oauth2_session(&self, clock: &dyn Clock, session: &Session) { + self.tracker + .record_oauth2_session(clock, session, self.ip) + .await; + } + + /// Record activity in a personal session. 
+ pub async fn record_personal_session(&self, clock: &dyn Clock, session: &PersonalSession) { + self.tracker + .record_personal_session(clock, session, self.ip) + .await; + } + + /// Record activity in a compatibility session. + pub async fn record_compat_session(&self, clock: &dyn Clock, session: &CompatSession) { + self.tracker + .record_compat_session(clock, session, self.ip) + .await; + } + + /// Record activity in a browser session. + pub async fn record_browser_session(&self, clock: &dyn Clock, session: &BrowserSession) { + self.tracker + .record_browser_session(clock, session, self.ip) + .await; + } +} diff --git a/matrix-authentication-service/crates/handlers/src/activity_tracker/mod.rs b/matrix-authentication-service/crates/handlers/src/activity_tracker/mod.rs new file mode 100644 index 00000000..e1c6b976 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/activity_tracker/mod.rs @@ -0,0 +1,239 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod bound; +mod worker; + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use mas_data_model::{ + BrowserSession, Clock, CompatSession, Session, personal::session::PersonalSession, +}; +use mas_storage::BoxRepositoryFactory; +use tokio_util::{sync::CancellationToken, task::TaskTracker}; +use ulid::Ulid; + +pub use self::bound::Bound; +use self::worker::Worker; + +static MESSAGE_QUEUE_SIZE: usize = 1000; + +#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Hash)] +enum SessionKind { + OAuth2, + Compat, + /// Session associated with personal access tokens + Personal, + Browser, +} + +impl SessionKind { + const fn as_str(self) -> &'static str { + match self { + SessionKind::OAuth2 => "oauth2", + SessionKind::Compat => "compat", + SessionKind::Personal => "personal", + SessionKind::Browser => "browser", + } + } +} + +enum Message { + Record { + kind: SessionKind, + id: Ulid, + date_time: DateTime, + ip: Option, + }, + Flush(tokio::sync::oneshot::Sender<()>), +} + +#[derive(Clone)] +pub struct ActivityTracker { + channel: tokio::sync::mpsc::Sender, +} + +impl ActivityTracker { + /// Create a new activity tracker + /// + /// It will spawn the background worker and a loop to flush the tracker on + /// the task tracker, and both will shut themselves down, flushing one last + /// time, when the cancellation token is cancelled. 
+ #[must_use] + pub fn new( + repository_factory: BoxRepositoryFactory, + flush_interval: std::time::Duration, + task_tracker: &TaskTracker, + cancellation_token: CancellationToken, + ) -> Self { + let worker = Worker::new(repository_factory); + let (sender, receiver) = tokio::sync::mpsc::channel(MESSAGE_QUEUE_SIZE); + let tracker = ActivityTracker { channel: sender }; + + // Spawn the flush loop and the worker + task_tracker.spawn( + tracker + .clone() + .flush_loop(flush_interval, cancellation_token.clone()), + ); + task_tracker.spawn(worker.run(receiver, cancellation_token)); + + tracker + } + + /// Bind the activity tracker to an IP address. + #[must_use] + pub fn bind(self, ip: Option) -> Bound { + Bound::new(self, ip) + } + + /// Record activity in an OAuth 2.0 session. + pub async fn record_oauth2_session( + &self, + clock: &dyn Clock, + session: &Session, + ip: Option, + ) { + let res = self + .channel + .send(Message::Record { + kind: SessionKind::OAuth2, + id: session.id, + date_time: clock.now(), + ip, + }) + .await; + + if let Err(e) = res { + tracing::error!("Failed to record OAuth2 session: {}", e); + } + } + + /// Record activity in a personal session. + pub async fn record_personal_session( + &self, + clock: &dyn Clock, + session: &PersonalSession, + ip: Option, + ) { + let res = self + .channel + .send(Message::Record { + kind: SessionKind::Personal, + id: session.id, + date_time: clock.now(), + ip, + }) + .await; + + if let Err(e) = res { + tracing::error!("Failed to record Personal session: {}", e); + } + } + + /// Record activity in a compat session. 
+ pub async fn record_compat_session( + &self, + clock: &dyn Clock, + compat_session: &CompatSession, + ip: Option, + ) { + let res = self + .channel + .send(Message::Record { + kind: SessionKind::Compat, + id: compat_session.id, + date_time: clock.now(), + ip, + }) + .await; + + if let Err(e) = res { + tracing::error!("Failed to record compat session: {}", e); + } + } + + /// Record activity in a browser session. + pub async fn record_browser_session( + &self, + clock: &dyn Clock, + browser_session: &BrowserSession, + ip: Option, + ) { + let res = self + .channel + .send(Message::Record { + kind: SessionKind::Browser, + id: browser_session.id, + date_time: clock.now(), + ip, + }) + .await; + + if let Err(e) = res { + tracing::error!("Failed to record browser session: {}", e); + } + } + + /// Manually flush the activity tracker. + pub async fn flush(&self) { + let (tx, rx) = tokio::sync::oneshot::channel(); + let res = self.channel.send(Message::Flush(tx)).await; + + match res { + Ok(()) => { + if let Err(e) = rx.await { + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to flush activity tracker" + ); + } + } + Err(e) => { + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to flush activity tracker" + ); + } + } + } + + /// Regularly flush the activity tracker. + async fn flush_loop( + self, + interval: std::time::Duration, + cancellation_token: CancellationToken, + ) { + // This guard on the shutdown token is to ensure that if this task crashes for + // any reason, the server will shut down + let _guard = cancellation_token.clone().drop_guard(); + let mut interval = tokio::time::interval(interval); + interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + + loop { + tokio::select! 
{ + biased; + + () = cancellation_token.cancelled() => { + // The cancellation token was cancelled, so we should exit + return; + } + + // First check if the channel is closed, then check if the timer expired + () = self.channel.closed() => { + // The channel was closed, so we should exit + return; + } + + + _ = interval.tick() => { + self.flush().await; + } + } + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/activity_tracker/worker.rs b/matrix-authentication-service/crates/handlers/src/activity_tracker/worker.rs new file mode 100644 index 00000000..9405eab4 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/activity_tracker/worker.rs @@ -0,0 +1,269 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, net::IpAddr}; + +use chrono::{DateTime, Utc}; +use mas_storage::{ + BoxRepositoryFactory, RepositoryAccess, RepositoryError, user::BrowserSessionRepository, +}; +use opentelemetry::{ + Key, KeyValue, + metrics::{Counter, Gauge, Histogram}, +}; +use tokio_util::sync::CancellationToken; +use ulid::Ulid; + +use crate::{ + METER, + activity_tracker::{Message, SessionKind}, +}; + +/// The maximum number of pending activity records before we flush them to the +/// database automatically. +/// +/// The [`ActivityRecord`] structure plus the key in the [`HashMap`] takes less +/// than 100 bytes, so this should allocate around 100kB of memory. 
+static MAX_PENDING_RECORDS: usize = 1000; + +const TYPE: Key = Key::from_static_str("type"); +const SESSION_KIND: Key = Key::from_static_str("session_kind"); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Clone, Copy, Debug)] +struct ActivityRecord { + // XXX: We don't actually use the start time for now + #[allow(dead_code)] + start_time: DateTime, + end_time: DateTime, + ip: Option, +} + +/// Handles writing activity records to the database. +pub struct Worker { + repository_factory: BoxRepositoryFactory, + pending_records: HashMap<(SessionKind, Ulid), ActivityRecord>, + pending_records_gauge: Gauge, + message_counter: Counter, + flush_time_histogram: Histogram, +} + +impl Worker { + pub(crate) fn new(repository_factory: BoxRepositoryFactory) -> Self { + let message_counter = METER + .u64_counter("mas.activity_tracker.messages") + .with_description("The number of messages received by the activity tracker") + .with_unit("{messages}") + .build(); + + // Record stuff on the counter so that the metrics are initialized + for kind in &[ + SessionKind::OAuth2, + SessionKind::Compat, + SessionKind::Browser, + ] { + message_counter.add( + 0, + &[ + KeyValue::new(TYPE, "record"), + KeyValue::new(SESSION_KIND, kind.as_str()), + ], + ); + } + message_counter.add(0, &[KeyValue::new(TYPE, "flush")]); + message_counter.add(0, &[KeyValue::new(TYPE, "shutdown")]); + + let flush_time_histogram = METER + .u64_histogram("mas.activity_tracker.flush_time") + .with_description("The time it took to flush the activity tracker") + .with_unit("ms") + .build(); + + let pending_records_gauge = METER + .u64_gauge("mas.activity_tracker.pending_records") + .with_description("The number of pending activity records") + .with_unit("{records}") + .build(); + pending_records_gauge.record(0, &[]); + + Self { + repository_factory, + pending_records: HashMap::with_capacity(MAX_PENDING_RECORDS), + pending_records_gauge, + message_counter, + flush_time_histogram, + } + } + + pub(super) 
async fn run( + mut self, + mut receiver: tokio::sync::mpsc::Receiver, + cancellation_token: CancellationToken, + ) { + // This guard on the shutdown token is to ensure that if this task crashes for + // any reason, the server will shut down + let _guard = cancellation_token.clone().drop_guard(); + + loop { + let message = tokio::select! { + // Because we want the cancellation token to trigger only once, + // we looked whether we closed the channel or not + () = cancellation_token.cancelled(), if !receiver.is_closed() => { + // We only close the channel, which will make it flush all + // the pending messages + receiver.close(); + tracing::debug!("Shutting down activity tracker"); + continue; + }, + + message = receiver.recv() => { + // We consumed all the messages, break out of the loop + let Some(message) = message else { break }; + message + } + }; + + match message { + Message::Record { + kind, + id, + date_time, + ip, + } => { + if self.pending_records.len() >= MAX_PENDING_RECORDS { + tracing::warn!("Too many pending activity records, flushing"); + self.flush().await; + } + + if self.pending_records.len() >= MAX_PENDING_RECORDS { + tracing::error!( + kind = kind.as_str(), + %id, + %date_time, + "Still too many pending activity records, dropping" + ); + continue; + } + + self.message_counter.add( + 1, + &[ + KeyValue::new(TYPE, "record"), + KeyValue::new(SESSION_KIND, kind.as_str()), + ], + ); + + let record = + self.pending_records + .entry((kind, id)) + .or_insert_with(|| ActivityRecord { + start_time: date_time, + end_time: date_time, + ip, + }); + + record.end_time = date_time.max(record.end_time); + } + + Message::Flush(tx) => { + self.message_counter.add(1, &[KeyValue::new(TYPE, "flush")]); + + self.flush().await; + let _ = tx.send(()); + } + } + + // Update the gauge + self.pending_records_gauge + .record(self.pending_records.len() as u64, &[]); + } + + // Flush one last time + self.flush().await; + } + + /// Flush the activity tracker. 
+ async fn flush(&mut self) { + // Short path: if there are no pending records, we don't need to flush + if self.pending_records.is_empty() { + return; + } + + let start = std::time::Instant::now(); + let res = self.try_flush().await; + + // Measure the time it took to flush the activity tracker + let duration = start.elapsed(); + let duration_ms = duration.as_millis().try_into().unwrap_or(u64::MAX); + + match res { + Ok(()) => { + self.flush_time_histogram + .record(duration_ms, &[KeyValue::new(RESULT, "success")]); + } + Err(e) => { + self.flush_time_histogram + .record(duration_ms, &[KeyValue::new(RESULT, "failure")]); + tracing::error!( + error = &e as &dyn std::error::Error, + "Failed to flush activity tracker" + ); + } + } + } + + /// Fallible part of [`Self::flush`]. + #[tracing::instrument(name = "activity_tracker.flush", skip(self))] + async fn try_flush(&mut self) -> Result<(), RepositoryError> { + let pending_records = &self.pending_records; + let mut repo = self.repository_factory.create().await?; + + let mut browser_sessions = Vec::new(); + let mut oauth2_sessions = Vec::new(); + let mut compat_sessions = Vec::new(); + let mut personal_sessions = Vec::new(); + + for ((kind, id), record) in pending_records { + match kind { + SessionKind::Browser => { + browser_sessions.push((*id, record.end_time, record.ip)); + } + SessionKind::OAuth2 => { + oauth2_sessions.push((*id, record.end_time, record.ip)); + } + SessionKind::Compat => { + compat_sessions.push((*id, record.end_time, record.ip)); + } + SessionKind::Personal => { + personal_sessions.push((*id, record.end_time, record.ip)); + } + } + } + + tracing::info!( + "Flushing {} activity records to the database", + pending_records.len() + ); + + repo.browser_session() + .record_batch_activity(browser_sessions) + .await?; + repo.oauth2_session() + .record_batch_activity(oauth2_sessions) + .await?; + repo.compat_session() + .record_batch_activity(compat_sessions) + .await?; + repo.personal_session() + 
.record_batch_activity(personal_sessions) + .await?; + + repo.save().await?; + self.pending_records.clear(); + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/call_context.rs b/matrix-authentication-service/crates/handlers/src/admin/call_context.rs new file mode 100644 index 00000000..1cffe682 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/call_context.rs @@ -0,0 +1,319 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::convert::Infallible; + +use aide::OperationIo; +use axum::{ + Json, + extract::FromRequestParts, + response::{IntoResponse, Response}, +}; +use axum_extra::TypedHeader; +use headers::{Authorization, authorization::Bearer}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{ + BoxClock, Session, TokenFormatError, TokenType, User, + personal::session::{PersonalSession, PersonalSessionOwner}, +}; +use mas_storage::{BoxRepository, RepositoryError}; +use oauth2_types::scope::Scope; +use ulid::Ulid; + +use super::response::ErrorResponse; +use crate::BoundActivityTracker; + +#[derive(Debug, thiserror::Error)] +pub enum Rejection { + /// The authorization header is missing + #[error("Missing authorization header")] + MissingAuthorizationHeader, + + /// The authorization header is invalid + #[error("Invalid authorization header")] + InvalidAuthorizationHeader, + + /// Couldn't load the database repository + #[error("Couldn't load the database repository")] + RepositorySetup(#[source] Box), + + /// A database operation failed + #[error("Invalid repository operation")] + Repository(#[from] RepositoryError), + + /// The access token was not of the correct type for the Admin API + #[error("Invalid type of access token")] + InvalidAccessTokenType(#[from] Option), + 
+ /// The access token could not be found in the database + #[error("Unknown access token")] + UnknownAccessToken, + + /// The access token provided expired + #[error("Access token expired")] + TokenExpired, + + /// The session associated with the access token was revoked + #[error("Access token revoked")] + SessionRevoked, + + /// The user associated with the session is locked + #[error("User locked")] + UserLocked, + + /// Failed to load the session + #[error("Failed to load session {0}")] + LoadSession(Ulid), + + /// Failed to load the user + #[error("Failed to load user {0}")] + LoadUser(Ulid), + + /// The session does not have the `urn:mas:admin` scope + #[error("Missing urn:mas:admin scope")] + MissingScope, +} + +impl IntoResponse for Rejection { + fn into_response(self) -> Response { + let response = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!( + self, + Self::RepositorySetup(_) + | Self::Repository(_) + | Self::LoadSession(_) + | Self::LoadUser(_) + ); + + let status = match &self { + Rejection::InvalidAuthorizationHeader | Rejection::MissingAuthorizationHeader => { + StatusCode::BAD_REQUEST + } + + Rejection::UnknownAccessToken + | Rejection::TokenExpired + | Rejection::SessionRevoked + | Rejection::UserLocked + | Rejection::MissingScope + | Rejection::InvalidAccessTokenType(_) => StatusCode::UNAUTHORIZED, + + Rejection::RepositorySetup(_) + | Rejection::Repository(_) + | Rejection::LoadSession(_) + | Rejection::LoadUser(_) => StatusCode::INTERNAL_SERVER_ERROR, + }; + + (status, sentry_event_id, Json(response)).into_response() + } +} + +/// An extractor which authorizes the request +/// +/// Because we need to load the database repository and the clock, we keep them +/// in the context to avoid creating two instances for each request. 
+#[non_exhaustive] +#[derive(OperationIo)] +#[aide(input)] +pub struct CallContext { + pub repo: BoxRepository, + pub clock: BoxClock, + pub user: Option, + pub session: CallerSession, +} + +impl FromRequestParts for CallContext +where + S: Send + Sync, + BoundActivityTracker: FromRequestParts, + BoxRepository: FromRequestParts, + BoxClock: FromRequestParts, + >::Rejection: + Into>, +{ + type Rejection = Rejection; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + state: &S, + ) -> Result { + let Ok(activity_tracker) = BoundActivityTracker::from_request_parts(parts, state).await; + let Ok(clock) = BoxClock::from_request_parts(parts, state).await; + + // Load the database repository + let mut repo = BoxRepository::from_request_parts(parts, state) + .await + .map_err(Into::into) + .map_err(Rejection::RepositorySetup)?; + + // Extract the access token from the authorization header + let token = TypedHeader::>::from_request_parts(parts, state) + .await + .map_err(|e| { + // We map to two differentsson of errors depending on whether the header is + // missing or invalid + if e.is_missing() { + Rejection::MissingAuthorizationHeader + } else { + Rejection::InvalidAuthorizationHeader + } + })?; + + let token = token.token(); + let token_type = TokenType::check(token)?; + + let session = match token_type { + TokenType::AccessToken => { + // Look for the access token in the database + let token = repo + .oauth2_access_token() + .find_by_token(token) + .await? + .ok_or(Rejection::UnknownAccessToken)?; + + // Look for the associated session in the database + let session = repo + .oauth2_session() + .lookup(token.session_id) + .await? 
+ .ok_or_else(|| Rejection::LoadSession(token.session_id))?; + + if !session.is_valid() { + return Err(Rejection::SessionRevoked); + } + + if !token.is_valid(clock.now()) { + return Err(Rejection::TokenExpired); + } + + // Record the activity on the session + activity_tracker + .record_oauth2_session(&clock, &session) + .await; + + CallerSession::OAuth2Session(session) + } + TokenType::PersonalAccessToken => { + // Look for the access token in the database + let token = repo + .personal_access_token() + .find_by_token(token) + .await? + .ok_or(Rejection::UnknownAccessToken)?; + + // Look for the associated session in the database + let session = repo + .personal_session() + .lookup(token.session_id) + .await? + .ok_or_else(|| Rejection::LoadSession(token.session_id))?; + + if !session.is_valid() { + return Err(Rejection::SessionRevoked); + } + + if !token.is_valid(clock.now()) { + return Err(Rejection::TokenExpired); + } + + // Check the validity of the owner of the personal session + match session.owner { + PersonalSessionOwner::User(owner_user_id) => { + let owner_user = repo + .user() + .lookup(owner_user_id) + .await? + .ok_or_else(|| Rejection::LoadUser(owner_user_id))?; + if !owner_user.is_valid() { + return Err(Rejection::UserLocked); + } + } + PersonalSessionOwner::OAuth2Client(_) => { + // nop: Client owners are always valid + } + } + + // Record the activity on the session + activity_tracker + .record_personal_session(&clock, &session) + .await; + + CallerSession::PersonalSession(session) + } + _other => { + return Err(Rejection::InvalidAccessTokenType(None)); + } + }; + + // Load the user if there is one + let user = if let Some(user_id) = session.user_id() { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or_else(|| Rejection::LoadUser(user_id))?; + + match session { + CallerSession::OAuth2Session(_) => { + // For OAuth2 sessions: check that the user is valid enough + // to be a user. 
+ if !user.is_valid() { + return Err(Rejection::UserLocked); + } + } + CallerSession::PersonalSession(_) => { + // For personal sessions: check that the actor is valid enough + // to be an actor. + if !user.is_valid_actor() { + return Err(Rejection::UserLocked); + } + } + } + + Some(user) + } else { + // Double check we're not using a PersonalSession + assert!(matches!(session, CallerSession::OAuth2Session(_))); + None + }; + + // For now, we only check that the session has the admin scope + // Later we might want to check other route-specific scopes + if !session.scope().contains("urn:mas:admin") { + return Err(Rejection::MissingScope); + } + + Ok(Self { + repo, + clock, + user, + session, + }) + } +} + +/// The session representing the caller of the Admin API; +/// could either be an OAuth session or a personal session. +pub enum CallerSession { + OAuth2Session(Session), + PersonalSession(PersonalSession), +} + +impl CallerSession { + pub fn scope(&self) -> &Scope { + match self { + CallerSession::OAuth2Session(session) => &session.scope, + CallerSession::PersonalSession(session) => &session.scope, + } + } + + pub fn user_id(&self) -> Option { + match self { + CallerSession::OAuth2Session(session) => session.user_id, + CallerSession::PersonalSession(session) => Some(session.actor_user_id), + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/mod.rs new file mode 100644 index 00000000..cbb23edb --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/mod.rs @@ -0,0 +1,251 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use aide::{ + axum::ApiRouter, + openapi::{OAuth2Flow, OAuth2Flows, OpenApi, SecurityScheme, Server, Tag}, + transform::TransformOpenApi, +}; +use axum::{ + Json, Router, + extract::{FromRef, FromRequestParts, State}, + http::HeaderName, + response::Html, +}; +use hyper::header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE}; +use indexmap::IndexMap; +use mas_axum_utils::InternalError; +use mas_data_model::{AppVersion, BoxRng, SiteConfig}; +use mas_http::CorsLayerExt; +use mas_matrix::HomeserverConnection; +use mas_policy::PolicyFactory; +use mas_router::{ + ApiDoc, ApiDocCallback, OAuth2AuthorizationEndpoint, OAuth2TokenEndpoint, Route, SimpleRoute, + UrlBuilder, +}; +use mas_templates::{ApiDocContext, Templates}; +use schemars::transform::AddNullable; +use tower_http::cors::{Any, CorsLayer}; + +mod call_context; +mod model; +mod params; +mod response; +mod schema; +mod v1; + +use self::call_context::CallContext; +use crate::passwords::PasswordManager; + +fn finish(t: TransformOpenApi) -> TransformOpenApi { + t.title("Matrix Authentication Service admin API") + .tag(Tag { + name: "server".to_owned(), + description: Some("Information about the server".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "compat-session".to_owned(), + description: Some("Manage compatibility sessions from legacy clients".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "policy-data".to_owned(), + description: Some("Manage the dynamic policy data".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "oauth2-session".to_owned(), + description: Some("Manage OAuth2 sessions".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "user".to_owned(), + description: Some("Manage users".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "user-email".to_owned(), + description: Some("Manage emails associated with users".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "user-session".to_owned(), + description: Some("Manage browser sessions 
of users".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "user-registration-token".to_owned(), + description: Some("Manage user registration tokens".to_owned()), + ..Tag::default() + }) + .tag(Tag { + name: "upstream-oauth-link".to_owned(), + description: Some( + "Manage links between local users and identities from upstream OAuth 2.0 providers" + .to_owned(), + ), + ..Default::default() + }) + .tag(Tag { + name: "upstream-oauth-provider".to_owned(), + description: Some("Manage upstream OAuth 2.0 providers".to_owned()), + ..Tag::default() + }) + .security_scheme("oauth2", oauth_security_scheme(None)) + .security_scheme( + "token", + SecurityScheme::Http { + scheme: "bearer".to_owned(), + bearer_format: None, + description: Some("An access token with access to the admin API".to_owned()), + extensions: IndexMap::default(), + }, + ) + .security_requirement_scopes("oauth2", ["urn:mas:admin"]) + .security_requirement_scopes("bearer", ["urn:mas:admin"]) +} + +fn oauth_security_scheme(url_builder: Option<&UrlBuilder>) -> SecurityScheme { + let (authorization_url, token_url) = if let Some(url_builder) = url_builder { + ( + url_builder.oauth_authorization_endpoint().to_string(), + url_builder.oauth_token_endpoint().to_string(), + ) + } else { + // This is a dirty fix for Swagger UI: when it joins the URLs with the + // base URL, if the path starts with a slash, it will go to the root of + // the domain instead of the API root. 
+ // It works if we make it explicitly relative + ( + format!(".{}", OAuth2AuthorizationEndpoint::PATH), + format!(".{}", OAuth2TokenEndpoint::PATH), + ) + }; + + let scopes = IndexMap::from([( + "urn:mas:admin".to_owned(), + "Grant access to the admin API".to_owned(), + )]); + + SecurityScheme::OAuth2 { + flows: OAuth2Flows { + client_credentials: Some(OAuth2Flow::ClientCredentials { + refresh_url: Some(token_url.clone()), + token_url: token_url.clone(), + scopes: scopes.clone(), + }), + authorization_code: Some(OAuth2Flow::AuthorizationCode { + authorization_url, + refresh_url: Some(token_url.clone()), + token_url, + scopes, + }), + implicit: None, + password: None, + }, + description: None, + extensions: IndexMap::default(), + } +} + +pub fn router() -> (OpenApi, Router) +where + S: Clone + Send + Sync + 'static, + Arc: FromRef, + PasswordManager: FromRef, + BoxRng: FromRequestParts, + CallContext: FromRequestParts, + Templates: FromRef, + UrlBuilder: FromRef, + Arc: FromRef, + SiteConfig: FromRef, + AppVersion: FromRef, +{ + // We *always* want to explicitly set the possible responses, beacuse the + // infered ones are not necessarily correct + aide::generate::infer_responses(false); + + aide::generate::in_context(|ctx| { + ctx.schema = schemars::generate::SchemaGenerator::new( + schemars::generate::SchemaSettings::openapi3().with(|settings| { + // Remove the transform which adds nullable fields, as it's not + // valid with OpenAPI 3.1. For some reason, aide/schemars output + // an OpenAPI 3.1 schema with this nullable transform. 
+ settings + .transforms + .retain(|transform| !transform.is::()); + }), + ); + }); + + let mut api = OpenApi::default(); + let router = ApiRouter::::new() + .nest("/api/admin/v1", self::v1::router()) + .finish_api_with(&mut api, finish); + + let router = router + // Serve the OpenAPI spec as JSON + .route( + "/api/spec.json", + axum::routing::get({ + let api = api.clone(); + move |State(url_builder): State| { + // Let's set the servers to the HTTP base URL + let mut api = api.clone(); + + let _ = TransformOpenApi::new(&mut api) + .server(Server { + url: url_builder.http_base().to_string(), + ..Server::default() + }) + .security_scheme("oauth2", oauth_security_scheme(Some(&url_builder))); + + std::future::ready(Json(api)) + } + }), + ) + // Serve the Swagger API reference + .route(ApiDoc::route(), axum::routing::get(swagger)) + .route( + ApiDocCallback::route(), + axum::routing::get(swagger_callback), + ) + .layer( + CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_otel_headers([ + AUTHORIZATION, + ACCEPT, + CONTENT_TYPE, + // Swagger will send this header, so we have to allow it to avoid CORS errors + HeaderName::from_static("x-requested-with"), + ]), + ); + + (api, router) +} + +async fn swagger( + State(url_builder): State, + State(templates): State, +) -> Result, InternalError> { + let ctx = ApiDocContext::from_url_builder(&url_builder); + let res = templates.render_swagger(&ctx)?; + Ok(Html(res)) +} + +async fn swagger_callback( + State(url_builder): State, + State(templates): State, +) -> Result, InternalError> { + let ctx = ApiDocContext::from_url_builder(&url_builder); + let res = templates.render_swagger_callback(&ctx)?; + Ok(Html(res)) +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/model.rs b/matrix-authentication-service/crates/handlers/src/admin/model.rs new file mode 100644 index 00000000..7936c02f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/model.rs @@ -0,0 +1,956 @@ +// 
Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Device, + personal::{ + PersonalAccessToken as DataModelPersonalAccessToken, + session::{PersonalSession as DataModelPersonalSession, PersonalSessionOwner}, + }, +}; +use schemars::JsonSchema; +use serde::Serialize; +use thiserror::Error; +use ulid::Ulid; +use url::Url; + +/// A resource, with a type and an ID +pub trait Resource { + /// The type of the resource + const KIND: &'static str; + + /// The canonical path prefix for this kind of resource + const PATH: &'static str; + + /// The ID of the resource + fn id(&self) -> Ulid; + + /// The canonical path for this resource + /// + /// This is the concatenation of the canonical path prefix and the ID + fn path(&self) -> String { + format!("{}/{}", Self::PATH, self.id()) + } +} + +/// A user +#[derive(Serialize, JsonSchema)] +pub struct User { + #[serde(skip)] + id: Ulid, + + /// The username (localpart) of the user + username: String, + + /// When the user was created + created_at: DateTime, + + /// When the user was locked. If null, the user is not locked. + locked_at: Option>, + + /// When the user was deactivated. If null, the user is not deactivated. + deactivated_at: Option>, + + /// Whether the user can request admin privileges. 
+ admin: bool, + + /// Whether the user was a guest before migrating to MAS, + legacy_guest: bool, +} + +impl User { + /// Samples of users with different properties for examples in the schema + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + admin: false, + legacy_guest: false, + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + username: "bob".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + admin: true, + legacy_guest: false, + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + username: "charlie".to_owned(), + created_at: DateTime::default(), + locked_at: Some(DateTime::default()), + deactivated_at: None, + admin: false, + legacy_guest: true, + }, + ] + } +} + +impl From for User { + fn from(user: mas_data_model::User) -> Self { + Self { + id: user.id, + username: user.username, + created_at: user.created_at, + locked_at: user.locked_at, + deactivated_at: user.deactivated_at, + admin: user.can_request_admin, + legacy_guest: user.is_guest, + } + } +} + +impl Resource for User { + const KIND: &'static str = "user"; + const PATH: &'static str = "/api/admin/v1/users"; + + fn id(&self) -> Ulid { + self.id + } +} + +/// An email address for a user +#[derive(Serialize, JsonSchema)] +pub struct UserEmail { + #[serde(skip)] + id: Ulid, + + /// When the object was created + created_at: DateTime, + + /// The ID of the user who owns this email address + #[schemars(with = "super::schema::Ulid")] + user_id: Ulid, + + /// The email address + email: String, +} + +impl Resource for UserEmail { + const KIND: &'static str = "user-email"; + const PATH: &'static str = "/api/admin/v1/user-emails"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl From for UserEmail { + fn from(value: mas_data_model::UserEmail) -> Self { + Self { + id: value.id, + created_at: value.created_at, + user_id: 
value.user_id, + email: value.email, + } + } +} + +impl UserEmail { + pub fn samples() -> [Self; 1] { + [Self { + id: Ulid::from_bytes([0x01; 16]), + created_at: DateTime::default(), + user_id: Ulid::from_bytes([0x02; 16]), + email: "alice@example.com".to_owned(), + }] + } +} + +/// A compatibility session for legacy clients +#[derive(Serialize, JsonSchema)] +pub struct CompatSession { + #[serde(skip)] + pub id: Ulid, + + /// The ID of the user that owns this session + #[schemars(with = "super::schema::Ulid")] + pub user_id: Ulid, + + /// The Matrix device ID of this session + #[schemars(with = "super::schema::Device")] + pub device_id: Option, + + /// The ID of the user session that started this session, if any + #[schemars(with = "super::schema::Ulid")] + pub user_session_id: Option, + + /// The redirect URI used to login in the client, if it was an SSO login + pub redirect_uri: Option, + + /// The time this session was created + pub created_at: DateTime, + + /// The user agent string that started this session, if any + pub user_agent: Option, + + /// The time this session was last active + pub last_active_at: Option>, + + /// The last IP address recorded for this session + pub last_active_ip: Option, + + /// The time this session was finished + pub finished_at: Option>, + + /// The user-provided name, if any + pub human_name: Option, +} + +impl + From<( + mas_data_model::CompatSession, + Option, + )> for CompatSession +{ + fn from( + (session, sso_login): ( + mas_data_model::CompatSession, + Option, + ), + ) -> Self { + let finished_at = session.finished_at(); + Self { + id: session.id, + user_id: session.user_id, + device_id: session.device, + user_session_id: session.user_session_id, + redirect_uri: sso_login.map(|sso| sso.redirect_uri), + created_at: session.created_at, + user_agent: session.user_agent, + last_active_at: session.last_active_at, + last_active_ip: session.last_active_ip, + finished_at, + human_name: session.human_name, + } + } +} + +impl 
Resource for CompatSession { + const KIND: &'static str = "compat-session"; + const PATH: &'static str = "/api/admin/v1/compat-sessions"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl CompatSession { + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + user_id: Ulid::from_bytes([0x01; 16]), + device_id: Some("AABBCCDDEE".to_owned().into()), + user_session_id: Some(Ulid::from_bytes([0x11; 16])), + redirect_uri: Some("https://example.com/redirect".parse().unwrap()), + created_at: DateTime::default(), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some([1, 2, 3, 4].into()), + finished_at: None, + human_name: Some("Laptop".to_owned()), + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + user_id: Ulid::from_bytes([0x01; 16]), + device_id: Some("FFGGHHIIJJ".to_owned().into()), + user_session_id: Some(Ulid::from_bytes([0x12; 16])), + redirect_uri: None, + created_at: DateTime::default(), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some([1, 2, 3, 4].into()), + finished_at: Some(DateTime::default()), + human_name: None, + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + user_id: Ulid::from_bytes([0x01; 16]), + device_id: None, + user_session_id: None, + redirect_uri: None, + created_at: DateTime::default(), + user_agent: None, + last_active_at: None, + last_active_ip: None, + finished_at: None, + human_name: None, + }, + ] + } +} + +/// A OAuth 2.0 session +#[derive(Serialize, JsonSchema)] +pub struct OAuth2Session { + #[serde(skip)] + id: Ulid, + + /// When the object was created + created_at: DateTime, + + /// When the session was finished + finished_at: Option>, + + /// The ID of the user who owns the session + #[schemars(with = "Option")] + user_id: Option, + + /// The ID of the browser session which started this session + #[schemars(with = "Option")] + user_session_id: Option, + + /// The ID of the client 
which requested this session + #[schemars(with = "super::schema::Ulid")] + client_id: Ulid, + + /// The scope granted for this session + scope: String, + + /// The user agent string of the client which started this session + user_agent: Option, + + /// The last time the session was active + last_active_at: Option>, + + /// The last IP address used by the session + last_active_ip: Option, + + /// The user-provided name, if any + human_name: Option, +} + +impl From for OAuth2Session { + fn from(session: mas_data_model::Session) -> Self { + Self { + id: session.id, + created_at: session.created_at, + finished_at: session.finished_at(), + user_id: session.user_id, + user_session_id: session.user_session_id, + client_id: session.client_id, + scope: session.scope.to_string(), + user_agent: session.user_agent, + last_active_at: session.last_active_at, + last_active_ip: session.last_active_ip, + human_name: session.human_name, + } + } +} + +impl OAuth2Session { + /// Samples of OAuth 2.0 sessions + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + created_at: DateTime::default(), + finished_at: None, + user_id: Some(Ulid::from_bytes([0x02; 16])), + user_session_id: Some(Ulid::from_bytes([0x03; 16])), + client_id: Ulid::from_bytes([0x04; 16]), + scope: "openid".to_owned(), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some("127.0.0.1".parse().unwrap()), + human_name: Some("Laptop".to_owned()), + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + created_at: DateTime::default(), + finished_at: None, + user_id: None, + user_session_id: None, + client_id: Ulid::from_bytes([0x05; 16]), + scope: "urn:mas:admin".to_owned(), + user_agent: None, + last_active_at: None, + last_active_ip: None, + human_name: None, + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + created_at: DateTime::default(), + finished_at: Some(DateTime::default()), + user_id: Some(Ulid::from_bytes([0x04; 16])), + 
user_session_id: Some(Ulid::from_bytes([0x05; 16])), + client_id: Ulid::from_bytes([0x06; 16]), + scope: "urn:matrix:client:api:*".to_owned(), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some("127.0.0.1".parse().unwrap()), + human_name: None, + }, + ] + } +} + +impl Resource for OAuth2Session { + const KIND: &'static str = "oauth2-session"; + const PATH: &'static str = "/api/admin/v1/oauth2-sessions"; + + fn id(&self) -> Ulid { + self.id + } +} + +/// The browser (cookie) session for a user +#[derive(Serialize, JsonSchema)] +pub struct UserSession { + #[serde(skip)] + id: Ulid, + + /// When the object was created + created_at: DateTime, + + /// When the session was finished + finished_at: Option>, + + /// The ID of the user who owns the session + #[schemars(with = "super::schema::Ulid")] + user_id: Ulid, + + /// The user agent string of the client which started this session + user_agent: Option, + + /// The last time the session was active + last_active_at: Option>, + + /// The last IP address used by the session + last_active_ip: Option, +} + +impl From for UserSession { + fn from(value: mas_data_model::BrowserSession) -> Self { + Self { + id: value.id, + created_at: value.created_at, + finished_at: value.finished_at, + user_id: value.user.id, + user_agent: value.user_agent, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + } + } +} + +impl UserSession { + /// Samples of user sessions + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + created_at: DateTime::default(), + finished_at: None, + user_id: Ulid::from_bytes([0x02; 16]), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some("127.0.0.1".parse().unwrap()), + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + created_at: DateTime::default(), + finished_at: None, + user_id: Ulid::from_bytes([0x03; 16]), + user_agent: None, + 
last_active_at: None, + last_active_ip: None, + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + created_at: DateTime::default(), + finished_at: Some(DateTime::default()), + user_id: Ulid::from_bytes([0x04; 16]), + user_agent: Some("Mozilla/5.0".to_owned()), + last_active_at: Some(DateTime::default()), + last_active_ip: Some("127.0.0.1".parse().unwrap()), + }, + ] + } +} + +impl Resource for UserSession { + const KIND: &'static str = "user-session"; + const PATH: &'static str = "/api/admin/v1/user-sessions"; + + fn id(&self) -> Ulid { + self.id + } +} + +/// An upstream OAuth 2.0 link +#[derive(Serialize, JsonSchema)] +pub struct UpstreamOAuthLink { + #[serde(skip)] + id: Ulid, + + /// When the object was created + created_at: DateTime, + + /// The ID of the provider + #[schemars(with = "super::schema::Ulid")] + provider_id: Ulid, + + /// The subject of the upstream account, unique per provider + subject: String, + + /// The ID of the user who owns this link, if any + #[schemars(with = "Option")] + user_id: Option, + + /// A human-readable name of the upstream account + human_account_name: Option, +} + +impl Resource for UpstreamOAuthLink { + const KIND: &'static str = "upstream-oauth-link"; + const PATH: &'static str = "/api/admin/v1/upstream-oauth-links"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl From for UpstreamOAuthLink { + fn from(value: mas_data_model::UpstreamOAuthLink) -> Self { + Self { + id: value.id, + created_at: value.created_at, + provider_id: value.provider_id, + subject: value.subject, + user_id: value.user_id, + human_account_name: value.human_account_name, + } + } +} + +impl UpstreamOAuthLink { + /// Samples of upstream OAuth 2.0 links + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + created_at: DateTime::default(), + provider_id: Ulid::from_bytes([0x02; 16]), + subject: "john-42".to_owned(), + user_id: Some(Ulid::from_bytes([0x03; 16])), + human_account_name: Some("john.doe@example.com".to_owned()), 
+ }, + Self { + id: Ulid::from_bytes([0x02; 16]), + created_at: DateTime::default(), + provider_id: Ulid::from_bytes([0x03; 16]), + subject: "jane-123".to_owned(), + user_id: None, + human_account_name: None, + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + created_at: DateTime::default(), + provider_id: Ulid::from_bytes([0x04; 16]), + subject: "bob@social.example.com".to_owned(), + user_id: Some(Ulid::from_bytes([0x05; 16])), + human_account_name: Some("bob".to_owned()), + }, + ] + } +} + +/// The policy data +#[derive(Serialize, JsonSchema)] +pub struct PolicyData { + #[serde(skip)] + id: Ulid, + + /// The creation date of the policy data + created_at: DateTime, + + /// The policy data content + data: serde_json::Value, +} + +impl From for PolicyData { + fn from(policy_data: mas_data_model::PolicyData) -> Self { + Self { + id: policy_data.id, + created_at: policy_data.created_at, + data: policy_data.data, + } + } +} + +impl Resource for PolicyData { + const KIND: &'static str = "policy-data"; + const PATH: &'static str = "/api/admin/v1/policy-data"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl PolicyData { + /// Samples of policy data + pub fn samples() -> [Self; 1] { + [Self { + id: Ulid::from_bytes([0x01; 16]), + created_at: DateTime::default(), + data: serde_json::json!({ + "hello": "world", + "foo": 42, + "bar": true + }), + }] + } +} + +/// A registration token +#[derive(Serialize, JsonSchema)] +pub struct UserRegistrationToken { + #[serde(skip)] + id: Ulid, + + /// The token string + token: String, + + /// Whether the token is valid + valid: bool, + + /// Maximum number of times this token can be used + usage_limit: Option, + + /// Number of times this token has been used + times_used: u32, + + /// When the token was created + created_at: DateTime, + + /// When the token was last used. If null, the token has never been used. + last_used_at: Option>, + + /// When the token expires. If null, the token never expires. 
+ expires_at: Option>, + + /// When the token was revoked. If null, the token is not revoked. + revoked_at: Option>, +} + +impl UserRegistrationToken { + pub fn new(token: mas_data_model::UserRegistrationToken, now: DateTime) -> Self { + Self { + id: token.id, + valid: token.is_valid(now), + token: token.token, + usage_limit: token.usage_limit, + times_used: token.times_used, + created_at: token.created_at, + last_used_at: token.last_used_at, + expires_at: token.expires_at, + revoked_at: token.revoked_at, + } + } +} + +impl Resource for UserRegistrationToken { + const KIND: &'static str = "user-registration_token"; + const PATH: &'static str = "/api/admin/v1/user-registration-tokens"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl UserRegistrationToken { + /// Samples of registration tokens + pub fn samples() -> [Self; 2] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + token: "abc123def456".to_owned(), + valid: true, + usage_limit: Some(10), + times_used: 5, + created_at: DateTime::default(), + last_used_at: Some(DateTime::default()), + expires_at: Some(DateTime::default() + chrono::Duration::days(30)), + revoked_at: None, + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + token: "xyz789abc012".to_owned(), + valid: false, + usage_limit: None, + times_used: 0, + created_at: DateTime::default(), + last_used_at: None, + expires_at: None, + revoked_at: Some(DateTime::default()), + }, + ] + } +} + +/// An upstream OAuth 2.0 provider +#[derive(Serialize, JsonSchema)] +pub struct UpstreamOAuthProvider { + #[serde(skip)] + id: Ulid, + + /// The OIDC issuer of the provider + issuer: Option, + + /// A human-readable name for the provider + human_name: Option, + + /// A brand identifier, e.g. "apple" or "google" + brand_name: Option, + + /// When the provider was created + created_at: DateTime, + + /// When the provider was disabled. If null, the provider is enabled. 
+ disabled_at: Option>, +} + +impl From for UpstreamOAuthProvider { + fn from(provider: mas_data_model::UpstreamOAuthProvider) -> Self { + Self { + id: provider.id, + issuer: provider.issuer, + human_name: provider.human_name, + brand_name: provider.brand_name, + created_at: provider.created_at, + disabled_at: provider.disabled_at, + } + } +} + +impl Resource for UpstreamOAuthProvider { + const KIND: &'static str = "upstream-oauth-provider"; + const PATH: &'static str = "/api/admin/v1/upstream-oauth-providers"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl UpstreamOAuthProvider { + /// Samples of upstream OAuth 2.0 providers + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_bytes([0x01; 16]), + issuer: Some("https://accounts.google.com".to_owned()), + human_name: Some("Google".to_owned()), + brand_name: Some("google".to_owned()), + created_at: DateTime::default(), + disabled_at: None, + }, + Self { + id: Ulid::from_bytes([0x02; 16]), + issuer: Some("https://appleid.apple.com".to_owned()), + human_name: Some("Apple ID".to_owned()), + brand_name: Some("apple".to_owned()), + created_at: DateTime::default(), + disabled_at: Some(DateTime::default()), + }, + Self { + id: Ulid::from_bytes([0x03; 16]), + issuer: None, + human_name: Some("Custom OAuth Provider".to_owned()), + brand_name: None, + created_at: DateTime::default(), + disabled_at: None, + }, + ] + } +} + +/// An error that shouldn't happen in practice, but suggests database +/// inconsistency. +#[derive(Debug, Error)] +#[error( + "personal session {session_id} in inconsistent state: not revoked but no valid access token" +)] +pub struct InconsistentPersonalSession { + pub session_id: Ulid, +} + +// Note: we don't expose a separate concept of personal access tokens to the +// admin API; we merge the relevant attributes into the personal session. 
+/// A personal session (session using personal access tokens) +#[derive(Serialize, JsonSchema)] +pub struct PersonalSession { + #[serde(skip)] + id: Ulid, + + /// When the session was created + created_at: DateTime, + + /// When the session was revoked, if applicable + revoked_at: Option>, + + /// The ID of the user who owns this session (if user-owned) + #[schemars(with = "Option")] + owner_user_id: Option, + + /// The ID of the `OAuth2` client that owns this session (if client-owned) + #[schemars(with = "Option")] + owner_client_id: Option, + + /// The ID of the user that the session acts on behalf of + #[schemars(with = "super::schema::Ulid")] + actor_user_id: Ulid, + + /// Human-readable name for the session + human_name: String, + + /// `OAuth2` scopes for this session + scope: String, + + /// When the session was last active + last_active_at: Option>, + + /// IP address of last activity + last_active_ip: Option, + + /// When the current token for this session expires. + /// The session will need to be regenerated, producing a new access token, + /// after this time. + /// None if the current token won't expire or if the session is revoked. + expires_at: Option>, + + /// The actual access token (only returned on creation) + #[serde(skip_serializing_if = "Option::is_none")] + access_token: Option, +} + +impl + TryFrom<( + DataModelPersonalSession, + Option, + )> for PersonalSession +{ + type Error = InconsistentPersonalSession; + + fn try_from( + (session, token): ( + DataModelPersonalSession, + Option, + ), + ) -> Result { + let expires_at = if let Some(token) = token { + token.expires_at + } else { + if !session.is_revoked() { + // No active token, but the session is not revoked. 
+ return Err(InconsistentPersonalSession { + session_id: session.id, + }); + } + None + }; + + let (owner_user_id, owner_client_id) = match session.owner { + PersonalSessionOwner::User(id) => (Some(id), None), + PersonalSessionOwner::OAuth2Client(id) => (None, Some(id)), + }; + + Ok(Self { + id: session.id, + created_at: session.created_at, + revoked_at: session.revoked_at(), + owner_user_id, + owner_client_id, + actor_user_id: session.actor_user_id, + human_name: session.human_name, + scope: session.scope.to_string(), + last_active_at: session.last_active_at, + last_active_ip: session.last_active_ip, + expires_at, + // If relevant, the caller will populate using `with_token` afterwards. + access_token: None, + }) + } +} + +impl Resource for PersonalSession { + const KIND: &'static str = "personal-session"; + const PATH: &'static str = "/api/admin/v1/personal-sessions"; + + fn id(&self) -> Ulid { + self.id + } +} + +impl PersonalSession { + /// Sample personal sessions for documentation/testing + pub fn samples() -> [Self; 3] { + [ + Self { + id: Ulid::from_string("01FSHN9AG0AJ6AC5HQ9X6H4RP4").unwrap(), + created_at: DateTime::from_timestamp(1_642_338_000, 0).unwrap(), /* 2022-01-16T14: + * 40:00Z */ + revoked_at: None, + owner_user_id: Some(Ulid::from_string("01FSHN9AG0MZAA6S4AF7CTV32E").unwrap()), + owner_client_id: None, + actor_user_id: Ulid::from_string("01FSHN9AG0MZAA6S4AF7CTV32E").unwrap(), + human_name: "Alice's Development Token".to_owned(), + scope: "openid urn:matrix:org.matrix.msc2967.client:api:*".to_owned(), + last_active_at: Some(DateTime::from_timestamp(1_642_347_000, 0).unwrap()), /* 2022-01-16T17:10:00Z */ + last_active_ip: Some("192.168.1.100".parse().unwrap()), + expires_at: None, + access_token: None, + }, + Self { + id: Ulid::from_string("01FSHN9AG0BJ6AC5HQ9X6H4RP5").unwrap(), + created_at: DateTime::from_timestamp(1_642_338_060, 0).unwrap(), /* 2022-01-16T14: + * 41:00Z */ + revoked_at: Some(DateTime::from_timestamp(1_642_350_000, 
0).unwrap()), /* 2022-01-16T18:00:00Z */ + owner_user_id: Some(Ulid::from_string("01FSHN9AG0NZAA6S4AF7CTV32F").unwrap()), + owner_client_id: None, + actor_user_id: Ulid::from_string("01FSHN9AG0NZAA6S4AF7CTV32F").unwrap(), + human_name: "Bob's Mobile App".to_owned(), + scope: "openid".to_owned(), + last_active_at: Some(DateTime::from_timestamp(1_642_349_000, 0).unwrap()), /* 2022-01-16T17:43:20Z */ + last_active_ip: Some("10.0.0.50".parse().unwrap()), + expires_at: None, + access_token: None, + }, + Self { + id: Ulid::from_string("01FSHN9AG0CJ6AC5HQ9X6H4RP6").unwrap(), + created_at: DateTime::from_timestamp(1_642_338_120, 0).unwrap(), /* 2022-01-16T14: + * 42:00Z */ + revoked_at: None, + owner_user_id: None, + owner_client_id: Some(Ulid::from_string("01FSHN9AG0DJ6AC5HQ9X6H4RP7").unwrap()), + actor_user_id: Ulid::from_string("01FSHN9AG0MZAA6S4AF7CTV32E").unwrap(), + human_name: "CI/CD Pipeline Token".to_owned(), + scope: "openid urn:mas:admin".to_owned(), + last_active_at: Some(DateTime::from_timestamp(1_642_348_000, 0).unwrap()), /* 2022-01-16T17:26:40Z */ + last_active_ip: Some("203.0.113.10".parse().unwrap()), + expires_at: Some(DateTime::from_timestamp(1_642_999_000, 0).unwrap()), + access_token: None, + }, + ] + } + + /// Add the actual token value (for use in creation responses) + pub fn with_token(mut self, access_token: String) -> Self { + self.access_token = Some(access_token); + self + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/params.rs b/matrix-authentication-service/crates/handlers/src/admin/params.rs new file mode 100644 index 00000000..4b1ccb1d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/params.rs @@ -0,0 +1,173 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// Generated code from schemars violates this rule +#![allow(clippy::str_to_string)] + +use std::{borrow::Cow, num::NonZeroUsize}; + +use aide::OperationIo; +use axum::{ + Json, + extract::{FromRequestParts, Path, rejection::PathRejection}, + response::IntoResponse, +}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_storage::pagination::PaginationDirection; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use super::response::ErrorResponse; + +#[derive(Debug, thiserror::Error)] +#[error("Invalid ULID in path")] +pub struct UlidPathParamRejection(#[from] PathRejection); + +impl IntoResponse for UlidPathParamRejection { + fn into_response(self) -> axum::response::Response { + ( + StatusCode::BAD_REQUEST, + Json(ErrorResponse::from_error(&self)), + ) + .into_response() + } +} + +#[derive(JsonSchema, Debug, Clone, Copy, Deserialize)] +struct UlidInPath { + /// # The ID of the resource + #[schemars(with = "super::schema::Ulid")] + id: Ulid, +} + +#[derive(FromRequestParts, OperationIo, Debug, Clone, Copy)] +#[from_request(rejection(UlidPathParamRejection))] +#[aide(input_with = "Path")] +pub struct UlidPathParam(#[from_request(via(Path))] UlidInPath); + +impl std::ops::Deref for UlidPathParam { + type Target = Ulid; + + fn deref(&self) -> &Self::Target { + &self.0.id + } +} + +/// The default page size if not specified +const DEFAULT_PAGE_SIZE: usize = 10; + +#[derive(Deserialize, JsonSchema, Clone, Copy, Default, Debug)] +pub enum IncludeCount { + /// Include the total number of items (default) + #[default] + #[serde(rename = "true")] + True, + + /// Do not include the total number of items + #[serde(rename = "false")] + False, + + /// Only include the total number of items, skip the items themselves + #[serde(rename = "only")] + Only, +} + +impl IncludeCount { + pub(crate) fn add_to_base(self, base: &str) -> Cow<'_, str> { + let separator = if base.contains('?') { '&' } 
else { '?' }; + match self { + // This is the default, don't add anything + Self::True => Cow::Borrowed(base), + Self::False => format!("{base}{separator}count=false").into(), + Self::Only => format!("{base}{separator}count=only").into(), + } + } +} + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +struct PaginationParams { + /// Retrieve the items before the given ID + #[serde(rename = "page[before]")] + #[schemars(with = "Option")] + before: Option, + + /// Retrieve the items after the given ID + #[serde(rename = "page[after]")] + #[schemars(with = "Option")] + after: Option, + + /// Retrieve the first N items + #[serde(rename = "page[first]")] + first: Option, + + /// Retrieve the last N items + #[serde(rename = "page[last]")] + last: Option, + + /// Include the total number of items. Defaults to `true`. + #[serde(rename = "count")] + include_count: Option, +} + +#[derive(Debug, thiserror::Error)] +pub enum PaginationRejection { + #[error("Invalid pagination parameters")] + Invalid(#[from] QueryRejection), + + #[error("Cannot specify both `page[first]` and `page[last]` parameters")] + FirstAndLast, +} + +impl IntoResponse for PaginationRejection { + fn into_response(self) -> axum::response::Response { + ( + StatusCode::BAD_REQUEST, + Json(ErrorResponse::from_error(&self)), + ) + .into_response() + } +} + +/// An extractor for pagination parameters in the query string +#[derive(OperationIo, Debug, Clone, Copy)] +#[aide(input_with = "Query")] +pub struct Pagination(pub mas_storage::Pagination, pub IncludeCount); + +impl FromRequestParts for Pagination { + type Rejection = PaginationRejection; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + state: &S, + ) -> Result { + let params = Query::::from_request_parts(parts, state).await?; + + // Figure out the direction and the count out of the first and last parameters + let (direction, count) = match (params.first, params.last) { + // Make sure we don't specify both first and last + 
(Some(_), Some(_)) => return Err(PaginationRejection::FirstAndLast), + + // Default to forward pagination with a default page size + (None, None) => (PaginationDirection::Forward, DEFAULT_PAGE_SIZE), + + (Some(first), None) => (PaginationDirection::Forward, first.into()), + (None, Some(last)) => (PaginationDirection::Backward, last.into()), + }; + + Ok(Self( + mas_storage::Pagination { + before: params.before, + after: params.after, + direction, + count, + }, + params.include_count.unwrap_or_default(), + )) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/response.rs b/matrix-authentication-service/crates/handlers/src/admin/response.rs new file mode 100644 index 00000000..257773cd --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/response.rs @@ -0,0 +1,296 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +#![allow(clippy::module_name_repetitions)] + +use mas_storage::{Pagination, pagination::Edge}; +use schemars::JsonSchema; +use serde::Serialize; +use ulid::Ulid; + +use super::model::Resource; + +/// Related links +#[derive(Serialize, JsonSchema)] +struct PaginationLinks { + /// The canonical link to the current page + #[serde(rename = "self")] + self_: String, + + /// The link to the first page of results + #[serde(skip_serializing_if = "Option::is_none")] + first: Option, + + /// The link to the last page of results + #[serde(skip_serializing_if = "Option::is_none")] + last: Option, + + /// The link to the next page of results + /// + /// Only present if there is a next page + #[serde(skip_serializing_if = "Option::is_none")] + next: Option, + + /// The link to the previous page of results + /// + /// Only present if there is a previous page + #[serde(skip_serializing_if = "Option::is_none")] + prev: Option, +} + +#[derive(Serialize, JsonSchema)] +struct PaginationMeta { + /// The total number of results + #[serde(skip_serializing_if = "Option::is_none")] + count: Option, +} + +impl PaginationMeta { + fn is_empty(&self) -> bool { + self.count.is_none() + } +} + +/// A top-level response with a page of resources +#[derive(Serialize, JsonSchema)] +pub struct PaginatedResponse { + /// Response metadata + #[serde(skip_serializing_if = "PaginationMeta::is_empty")] + #[schemars(with = "Option")] + meta: PaginationMeta, + + /// The list of resources + #[serde(skip_serializing_if = "Option::is_none")] + data: Option>>, + + /// Related links + links: PaginationLinks, +} + +fn url_with_pagination(base: &str, pagination: Pagination) -> String { + let (path, query) = base.split_once('?').unwrap_or((base, "")); + let mut query = query.to_owned(); + + if let Some(before) = pagination.before { + query = format!("{query}&page[before]={before}"); + } + + if let Some(after) = pagination.after { + query = format!("{query}&page[after]={after}"); + } + + let count = 
pagination.count; + match pagination.direction { + mas_storage::pagination::PaginationDirection::Forward => { + query = format!("{query}&page[first]={count}"); + } + mas_storage::pagination::PaginationDirection::Backward => { + query = format!("{query}&page[last]={count}"); + } + } + + // Remove the first '&' + let query = query.trim_start_matches('&'); + + format!("{path}?{query}") +} + +impl PaginatedResponse { + pub fn for_page( + page: mas_storage::Page, + current_pagination: Pagination, + count: Option, + base: &str, + ) -> Self { + let links = PaginationLinks { + self_: url_with_pagination(base, current_pagination), + first: Some(url_with_pagination( + base, + Pagination::first(current_pagination.count), + )), + last: Some(url_with_pagination( + base, + Pagination::last(current_pagination.count), + )), + next: page.has_next_page.then(|| { + url_with_pagination( + base, + current_pagination + .clear_before() + .after(page.edges.last().unwrap().cursor), + ) + }), + prev: if page.has_previous_page { + Some(url_with_pagination( + base, + current_pagination + .clear_after() + .before(page.edges.first().unwrap().cursor), + )) + } else { + None + }, + }; + + let data = page + .edges + .into_iter() + .map(SingleResource::from_edge) + .collect(); + + Self { + meta: PaginationMeta { count }, + data: Some(data), + links, + } + } + + pub fn for_count_only(count: usize, base: &str) -> Self { + let links = PaginationLinks { + self_: base.to_owned(), + first: None, + last: None, + next: None, + prev: None, + }; + + Self { + meta: PaginationMeta { count: Some(count) }, + data: None, + links, + } + } +} + +/// A single resource, with its type, ID, attributes and related links +#[derive(Serialize, JsonSchema)] +struct SingleResource { + /// The type of the resource + #[serde(rename = "type")] + type_: &'static str, + + /// The ID of the resource + #[schemars(with = "super::schema::Ulid")] + id: Ulid, + + /// The attributes of the resource + attributes: T, + + /// Related links 
+ links: SelfLinks, + + /// Metadata about the resource + #[serde(skip_serializing_if = "SingleResourceMeta::is_empty")] + #[schemars(with = "Option")] + meta: SingleResourceMeta, +} + +/// Metadata associated with a resource +#[derive(Serialize, JsonSchema)] +struct SingleResourceMeta { + /// Information about the pagination of the resource + #[serde(skip_serializing_if = "Option::is_none")] + page: Option, +} + +impl SingleResourceMeta { + fn is_empty(&self) -> bool { + self.page.is_none() + } +} + +/// Pagination metadata for a resource +#[derive(Serialize, JsonSchema)] +struct SingleResourceMetaPage { + /// The cursor of this resource in the paginated result + cursor: String, +} + +impl SingleResource { + fn new(resource: T) -> Self { + let self_ = resource.path(); + Self { + type_: T::KIND, + id: resource.id(), + attributes: resource, + links: SelfLinks { self_ }, + meta: SingleResourceMeta { page: None }, + } + } + + fn from_edge(edge: Edge) -> Self { + let cursor = edge.cursor.to_string(); + let mut resource = Self::new(edge.node); + resource.meta.page = Some(SingleResourceMetaPage { cursor }); + resource + } +} + +/// Related links +#[derive(Serialize, JsonSchema)] +struct SelfLinks { + /// The canonical link to the current resource + #[serde(rename = "self")] + self_: String, +} + +/// A top-level response with a single resource +#[derive(Serialize, JsonSchema)] +pub struct SingleResponse { + data: SingleResource, + links: SelfLinks, +} + +impl SingleResponse { + /// Create a new single response with the given resource and link to itself + pub fn new(resource: T, self_: String) -> Self { + Self { + data: SingleResource::new(resource), + links: SelfLinks { self_ }, + } + } + + /// Create a new single response using the canonical path for the resource + pub fn new_canonical(resource: T) -> Self { + let self_ = resource.path(); + Self::new(resource, self_) + } +} + +/// A single error +#[derive(Serialize, JsonSchema)] +struct Error { + /// A human-readable 
title for the error + title: String, +} + +impl Error { + fn from_error(error: &(dyn std::error::Error + 'static)) -> Self { + Self { + title: error.to_string(), + } + } +} + +/// A top-level response with a list of errors +#[derive(Serialize, JsonSchema)] +pub struct ErrorResponse { + /// The list of errors + errors: Vec, +} + +impl ErrorResponse { + /// Create a new error response from any Rust error + pub fn from_error(error: &(dyn std::error::Error + 'static)) -> Self { + let mut errors = Vec::new(); + let mut head = Some(error); + while let Some(error) = head { + errors.push(Error::from_error(error)); + head = error.source(); + } + Self { errors } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/schema.rs b/matrix-authentication-service/crates/handlers/src/admin/schema.rs new file mode 100644 index 00000000..e305df06 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/schema.rs @@ -0,0 +1,59 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Common schema definitions + +use std::borrow::Cow; + +use schemars::{JsonSchema, Schema, SchemaGenerator, json_schema}; + +/// A type to use for schema definitions of ULIDs +/// +/// Use with `#[schemars(with = "crate::admin::schema::Ulid")]` +pub struct Ulid; + +impl JsonSchema for Ulid { + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("ULID") + } + + fn json_schema(_gen: &mut SchemaGenerator) -> Schema { + json_schema!({ + "type": "string", + "title": "ULID", + "description": "A ULID as per https://github.com/ulid/spec", + "examples": [ + "01ARZ3NDEKTSV4RRFFQ69G5FAV", + "01J41912SC8VGAQDD50F6APK91", + ], + "pattern": "^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$", + }) + } +} + +/// A type to use for schema definitions of device IDs +/// +/// Use with `#[schemars(with = "crate::admin::schema::Device")]` +pub struct Device; + +impl JsonSchema for Device { + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("DeviceID") + } + + fn json_schema(_gen: &mut SchemaGenerator) -> Schema { + json_schema!({ + "type": "string", + "title": "Device ID", + "description": "A device ID as per https://matrix.org/docs/spec/client_server/r0.6.0#device-ids", + "examples": [ + "AABBCCDDEE", + "FFGGHHIIJJ", + ], + "pattern": "^[A-Za-z0-9._~!$&'()*+,;=:&/-]+$", + }) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/finish.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/finish.rs new file mode 100644 index 00000000..df42c2ff --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/finish.rs @@ -0,0 +1,243 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::queue::{QueueJobRepositoryExt as _, SyncDevicesJob}; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{CompatSession, Resource}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Compatibility session with ID {0} not found")] + NotFound(Ulid), + + #[error("Compatibility session with ID {0} is already finished")] + AlreadyFinished(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::AlreadyFinished(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("finishCompatSession") + .summary("Finish a compatibility session") + .description( + "Calling this endpoint will finish the compatibility session, preventing any further use. 
A job will be scheduled to sync the user's devices with the homeserver.", + ) + .tag("compat-session") + .response_with::<200, Json>, _>(|t| { + // Get the finished session sample + let [_, finished_session, _] = CompatSession::samples(); + let id = finished_session.id(); + let response = SingleResponse::new( + finished_session, + format!("/api/admin/v1/compat-sessions/{id}/finish"), + ); + t.description("Compatibility session was finished").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::AlreadyFinished(Ulid::nil())); + t.description("Session is already finished") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Compatibility session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.compat_sessions.finish", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let session = repo + .compat_session() + .lookup(id) + .await? 
+ .ok_or(RouteError::NotFound(id))?; + + // Check if the session is already finished + if session.finished_at().is_some() { + return Err(RouteError::AlreadyFinished(id)); + } + + // Schedule a job to sync the devices of the user with the homeserver + tracing::info!(user.id = %session.user_id, "Scheduling device sync job for user"); + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SyncDevicesJob::new_for_id(session.user_id), + ) + .await?; + + // Finish the session + let session = repo.compat_session().finish(&clock, session).await?; + + // Get the SSO login info for the response + let sso_login = repo.compat_sso_login().find_for_session(&session).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + CompatSession::from((session, sso_login)), + format!("/api/admin/v1/compat-sessions/{id}/finish"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::{Clock as _, Device}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a compat session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add(&mut rng, &state.clock, &user, device, None, false, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!( + "/api/admin/v1/compat-sessions/{}/finish", + session.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = 
response.json(); + + // The finished_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["finished_at"], + serde_json::json!(state.clock.now()) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_already_finished_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a compat session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add(&mut rng, &state.clock, &user, device, None, false, None) + .await + .unwrap(); + + // Finish the session first + let session = repo + .compat_session() + .finish(&state.clock, session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Move the clock forward + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!( + "/api/admin/v1/compat-sessions/{}/finish", + session.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + format!( + "Compatibility session with ID {} is already finished", + session.id + ) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_unknown_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = + Request::post("/api/admin/v1/compat-sessions/01040G2081040G2081040G2081/finish") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: 
serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "Compatibility session with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/get.rs new file mode 100644 index 00000000..c77432d0 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/get.rs @@ -0,0 +1,163 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::CompatSession, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Compatibility session ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, RouteError::Internal(_)); + let status = match &self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getCompatSession") + .summary("Get a compatibility session") + .tag("compat-session") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= CompatSession::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Compatibility session was found") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Compatibility session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.compat_sessions.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let session = repo + .compat_session() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + let sso_login = repo.compat_sso_login().find_for_session(&session).await?; + + Ok(Json(SingleResponse::new_canonical(CompatSession::from(( + session, sso_login, + ))))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use mas_data_model::Device; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a compat session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add(&mut rng, &state.clock, &user, device, None, false, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let session_id = session.id; + let request = Request::get(format!("/api/admin/v1/compat-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: 
serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "compat-session", + "id": "01FSHN9AG0QHEHKX2JNQ2A2D07", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "TpLoieH5Ie", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:40:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHN9AG0QHEHKX2JNQ2A2D07" + } + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHN9AG0QHEHKX2JNQ2A2D07" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let session_id = Ulid::nil(); + let request = Request::get(format!("/api/admin/v1/compat-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/list.rs new file mode 100644 index 00000000..b407854f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/list.rs @@ -0,0 +1,653 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, compat::CompatSessionFilter}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{CompatSession, Resource}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum CompatSessionStatus { + Active, + Finished, +} + +impl std::fmt::Display for CompatSessionStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Active => write!(f, "active"), + Self::Finished => write!(f, "finished"), + } + } +} + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "CompatSessionFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve the items for the given user + #[serde(rename = "filter[user]")] + #[schemars(with = "Option")] + user: Option, + + /// Retrieve the items started from the given browser session + #[serde(rename = "filter[user-session]")] + #[schemars(with = "Option")] + user_session: Option, + + /// Retrieve the items with the given status + /// + /// Defaults to retrieve all sessions, including finished ones. 
+ /// + /// * `active`: Only retrieve active sessions + /// + /// * `finished`: Only retrieve finished sessions + #[serde(rename = "filter[status]")] + status: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(user) = self.user { + write!(f, "{sep}filter[user]={user}")?; + sep = '&'; + } + + if let Some(user_session) = self.user_session { + write!(f, "{sep}filter[user-session]={user_session}")?; + sep = '&'; + } + + if let Some(status) = self.status { + write!(f, "{sep}filter[status]={status}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("User session ID {0} not found")] + UserSessionNotFound(Ulid), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, RouteError::Internal(_)); + let status = match &self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) | Self::UserSessionNotFound(_) => StatusCode::NOT_FOUND, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listCompatSessions") + .summary("List compatibility sessions") + .description("Retrieve a list of compatibility sessions. +Note that by default, all sessions, including finished ones are returned, with the oldest first. 
+Use the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.") + .tag("compat-session") + .response_with::<200, Json>, _>(|t| { + let sessions = CompatSession::samples(); + let pagination = mas_storage::Pagination::first(sessions.len()); + let page = Page { + edges: sessions + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of compatibility sessions") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + CompatSession::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.compat_sessions.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = CompatSession::PATH); + let base = include_count.add_to_base(&base); + let filter = CompatSessionFilter::default(); + + // Load the user from the filter + let user = if let Some(user_id) = params.user { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UserNotFound(user_id))?; + + Some(user) + } else { + None + }; + + let filter = match &user { + Some(user) => filter.for_user(user), + None => filter, + }; + + let user_session = if let Some(user_session_id) = params.user_session { + let user_session = repo + .browser_session() + .lookup(user_session_id) + .await? 
+ .ok_or(RouteError::UserSessionNotFound(user_session_id))?; + + Some(user_session) + } else { + None + }; + + let filter = match &user_session { + Some(user_session) => filter.for_browser_session(user_session), + None => filter, + }; + + let filter = match params.status { + Some(CompatSessionStatus::Active) => filter.active_only(), + Some(CompatSessionStatus::Finished) => filter.finished_only(), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .compat_session() + .list(filter, pagination) + .await? + .map(CompatSession::from); + let count = repo.compat_session().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .compat_session() + .list(filter, pagination) + .await? + .map(CompatSession::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.compat_session().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use mas_data_model::Device; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_compat_session_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision two users, one compat session for each, and finish one of them + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + state.clock.advance(Duration::minutes(1)); + + let bob = repo + .user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + + 
let device = Device::generate(&mut rng); + repo.compat_session() + .add(&mut rng, &state.clock, &alice, device, None, false, None) + .await + .unwrap(); + let device = Device::generate(&mut rng); + + state.clock.advance(Duration::minutes(1)); + + let session = repo + .compat_session() + .add(&mut rng, &state.clock, &bob, device, None, false, None) + .await + .unwrap(); + state.clock.advance(Duration::minutes(1)); + repo.compat_session() + .finish(&state.clock, session) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::get("/api/admin/v1/compat-sessions") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "compat-session", + "id": "01FSHNB530AAPR7PEV8KNBZD5Y", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "LoieH5Iecx", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:41:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNB530AAPR7PEV8KNBZD5Y" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AAPR7PEV8KNBZD5Y" + } + } + }, + { + "type": "compat-session", + "id": "01FSHNCZP0PPF7X0EVMJNECPZW", + "attributes": { + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "device_id": "ZXyvelQWW9", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:42:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": "2022-01-16T14:43:00Z", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNCZP0PPF7X0EVMJNECPZW" + }, + "meta": { + "page": { + "cursor": "01FSHNCZP0PPF7X0EVMJNECPZW" + } + } + } + ], + "links": { + "self": 
"/api/admin/v1/compat-sessions?page[first]=10", + "first": "/api/admin/v1/compat-sessions?page[first]=10", + "last": "/api/admin/v1/compat-sessions?page[last]=10" + } + } + "#); + + // Filter by user + let request = Request::get(format!( + "/api/admin/v1/compat-sessions?filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "compat-session", + "id": "01FSHNB530AAPR7PEV8KNBZD5Y", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "LoieH5Iecx", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:41:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNB530AAPR7PEV8KNBZD5Y" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AAPR7PEV8KNBZD5Y" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "first": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "last": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[last]=10" + } + } + "#); + + // Filter by status (active) + let request = Request::get("/api/admin/v1/compat-sessions?filter[status]=active") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "compat-session", + "id": "01FSHNB530AAPR7PEV8KNBZD5Y", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "LoieH5Iecx", + "user_session_id": null, + "redirect_uri": null, + 
"created_at": "2022-01-16T14:41:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNB530AAPR7PEV8KNBZD5Y" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AAPR7PEV8KNBZD5Y" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?filter[status]=active&page[first]=10", + "first": "/api/admin/v1/compat-sessions?filter[status]=active&page[first]=10", + "last": "/api/admin/v1/compat-sessions?filter[status]=active&page[last]=10" + } + } + "#); + + // Filter by status (finished) + let request = Request::get("/api/admin/v1/compat-sessions?filter[status]=finished") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "compat-session", + "id": "01FSHNCZP0PPF7X0EVMJNECPZW", + "attributes": { + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "device_id": "ZXyvelQWW9", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:42:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": "2022-01-16T14:43:00Z", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNCZP0PPF7X0EVMJNECPZW" + }, + "meta": { + "page": { + "cursor": "01FSHNCZP0PPF7X0EVMJNECPZW" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?filter[status]=finished&page[first]=10", + "first": "/api/admin/v1/compat-sessions?filter[status]=finished&page[first]=10", + "last": "/api/admin/v1/compat-sessions?filter[status]=finished&page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/compat-sessions?count=false") + .bearer(&token) + .empty(); + let response = state.request(request).await; + 
response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "compat-session", + "id": "01FSHNB530AAPR7PEV8KNBZD5Y", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "LoieH5Iecx", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:41:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNB530AAPR7PEV8KNBZD5Y" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AAPR7PEV8KNBZD5Y" + } + } + }, + { + "type": "compat-session", + "id": "01FSHNCZP0PPF7X0EVMJNECPZW", + "attributes": { + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "device_id": "ZXyvelQWW9", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:42:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": "2022-01-16T14:43:00Z", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNCZP0PPF7X0EVMJNECPZW" + }, + "meta": { + "page": { + "cursor": "01FSHNCZP0PPF7X0EVMJNECPZW" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?count=false&page[first]=10", + "first": "/api/admin/v1/compat-sessions?count=false&page[first]=10", + "last": "/api/admin/v1/compat-sessions?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/compat-sessions?count=only") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/compat-sessions?count=only" + } + } + "#); + + // Test count=false with filtering + let request = Request::get(format!( + 
"/api/admin/v1/compat-sessions?count=false&filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "compat-session", + "id": "01FSHNB530AAPR7PEV8KNBZD5Y", + "attributes": { + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "device_id": "LoieH5Iecx", + "user_session_id": null, + "redirect_uri": null, + "created_at": "2022-01-16T14:41:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01FSHNB530AAPR7PEV8KNBZD5Y" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AAPR7PEV8KNBZD5Y" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "first": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "last": "/api/admin/v1/compat-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = + Request::get("/api/admin/v1/compat-sessions?count=only&filter[status]=active") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/compat-sessions?filter[status]=active&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/mod.rs new file mode 100644 index 00000000..db7b17ff --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/admin/v1/compat_sessions/mod.rs @@ -0,0 +1,14 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod finish; +mod get; +mod list; + +pub use self::{ + finish::{doc as finish_doc, handler as finish}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/mod.rs new file mode 100644 index 00000000..98f1d10e --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/mod.rs @@ -0,0 +1,262 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use aide::axum::{ + ApiRouter, + routing::{get_with, post_with}, +}; +use axum::extract::{FromRef, FromRequestParts}; +use mas_data_model::{AppVersion, BoxRng, SiteConfig}; +use mas_matrix::HomeserverConnection; +use mas_policy::PolicyFactory; + +use super::call_context::CallContext; +use crate::passwords::PasswordManager; + +mod compat_sessions; +mod oauth2_sessions; +mod personal_sessions; +mod policy_data; +mod site_config; +mod upstream_oauth_links; +mod upstream_oauth_providers; +mod user_emails; +mod user_registration_tokens; +mod user_sessions; +mod users; +mod version; + +pub fn router() -> ApiRouter +where + S: Clone + Send + Sync + 'static, + Arc: FromRef, + PasswordManager: FromRef, + SiteConfig: FromRef, + AppVersion: FromRef, + Arc: FromRef, + BoxRng: FromRequestParts, + CallContext: FromRequestParts, +{ + ApiRouter::::new() + .api_route( + "/site-config", + get_with(self::site_config::handler, self::site_config::doc), + ) + .api_route( + "/version", + get_with(self::version::handler, self::version::doc), + ) + .api_route( + "/compat-sessions", + get_with(self::compat_sessions::list, self::compat_sessions::list_doc), + ) + .api_route( + "/compat-sessions/{id}", + get_with(self::compat_sessions::get, self::compat_sessions::get_doc), + ) + .api_route( + "/compat-sessions/{id}/finish", + post_with( + self::compat_sessions::finish, + self::compat_sessions::finish_doc, + ), + ) + .api_route( + "/oauth2-sessions", + get_with(self::oauth2_sessions::list, self::oauth2_sessions::list_doc), + ) + .api_route( + "/oauth2-sessions/{id}", + get_with(self::oauth2_sessions::get, self::oauth2_sessions::get_doc), + ) + .api_route( + "/oauth2-sessions/{id}/finish", + post_with( + self::oauth2_sessions::finish, + self::oauth2_sessions::finish_doc, + ), + ) + .api_route( + "/personal-sessions", + get_with( + self::personal_sessions::list, + self::personal_sessions::list_doc, + ) + .post_with( + self::personal_sessions::add, + 
self::personal_sessions::add_doc, + ), + ) + .api_route( + "/personal-sessions/{id}", + get_with( + self::personal_sessions::get, + self::personal_sessions::get_doc, + ), + ) + .api_route( + "/personal-sessions/{id}/revoke", + post_with( + self::personal_sessions::revoke, + self::personal_sessions::revoke_doc, + ), + ) + .api_route( + "/personal-sessions/{id}/regenerate", + post_with( + self::personal_sessions::regenerate, + self::personal_sessions::regenerate_doc, + ), + ) + .api_route( + "/policy-data", + post_with(self::policy_data::set, self::policy_data::set_doc), + ) + .api_route( + "/policy-data/latest", + get_with( + self::policy_data::get_latest, + self::policy_data::get_latest_doc, + ), + ) + .api_route( + "/policy-data/{id}", + get_with(self::policy_data::get, self::policy_data::get_doc), + ) + .api_route( + "/users", + get_with(self::users::list, self::users::list_doc) + .post_with(self::users::add, self::users::add_doc), + ) + .api_route( + "/users/{id}", + get_with(self::users::get, self::users::get_doc), + ) + .api_route( + "/users/{id}/set-password", + post_with(self::users::set_password, self::users::set_password_doc), + ) + .api_route( + "/users/by-username/{username}", + get_with(self::users::by_username, self::users::by_username_doc), + ) + .api_route( + "/users/{id}/set-admin", + post_with(self::users::set_admin, self::users::set_admin_doc), + ) + .api_route( + "/users/{id}/deactivate", + post_with(self::users::deactivate, self::users::deactivate_doc), + ) + .api_route( + "/users/{id}/reactivate", + post_with(self::users::reactivate, self::users::reactivate_doc), + ) + .api_route( + "/users/{id}/lock", + post_with(self::users::lock, self::users::lock_doc), + ) + .api_route( + "/users/{id}/unlock", + post_with(self::users::unlock, self::users::unlock_doc), + ) + .api_route( + "/user-emails", + get_with(self::user_emails::list, self::user_emails::list_doc) + .post_with(self::user_emails::add, self::user_emails::add_doc), + ) + .api_route( + 
"/user-emails/{id}", + get_with(self::user_emails::get, self::user_emails::get_doc) + .delete_with(self::user_emails::delete, self::user_emails::delete_doc), + ) + .api_route( + "/user-sessions", + get_with(self::user_sessions::list, self::user_sessions::list_doc), + ) + .api_route( + "/user-sessions/{id}", + get_with(self::user_sessions::get, self::user_sessions::get_doc), + ) + .api_route( + "/user-sessions/{id}/finish", + post_with(self::user_sessions::finish, self::user_sessions::finish_doc), + ) + .api_route( + "/user-registration-tokens", + get_with( + self::user_registration_tokens::list, + self::user_registration_tokens::list_doc, + ) + .post_with( + self::user_registration_tokens::add, + self::user_registration_tokens::add_doc, + ), + ) + .api_route( + "/user-registration-tokens/{id}", + get_with( + self::user_registration_tokens::get, + self::user_registration_tokens::get_doc, + ) + .put_with( + self::user_registration_tokens::update, + self::user_registration_tokens::update_doc, + ), + ) + .api_route( + "/user-registration-tokens/{id}/revoke", + post_with( + self::user_registration_tokens::revoke, + self::user_registration_tokens::revoke_doc, + ), + ) + .api_route( + "/user-registration-tokens/{id}/unrevoke", + post_with( + self::user_registration_tokens::unrevoke, + self::user_registration_tokens::unrevoke_doc, + ), + ) + .api_route( + "/upstream-oauth-links", + get_with( + self::upstream_oauth_links::list, + self::upstream_oauth_links::list_doc, + ) + .post_with( + self::upstream_oauth_links::add, + self::upstream_oauth_links::add_doc, + ), + ) + .api_route( + "/upstream-oauth-links/{id}", + get_with( + self::upstream_oauth_links::get, + self::upstream_oauth_links::get_doc, + ) + .delete_with( + self::upstream_oauth_links::delete, + self::upstream_oauth_links::delete_doc, + ), + ) + .api_route( + "/upstream-oauth-providers", + get_with( + self::upstream_oauth_providers::list, + self::upstream_oauth_providers::list_doc, + ), + ) + .api_route( + 
"/upstream-oauth-providers/{id}", + get_with( + self::upstream_oauth_providers::get, + self::upstream_oauth_providers::get_doc, + ), + ) +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/finish.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/finish.rs new file mode 100644 index 00000000..23edef30 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/finish.rs @@ -0,0 +1,234 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::queue::{QueueJobRepositoryExt as _, SyncDevicesJob}; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{OAuth2Session, Resource}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("OAuth 2.0 session with ID {0} not found")] + NotFound(Ulid), + + #[error("OAuth 2.0 session with ID {0} is already finished")] + AlreadyFinished(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::AlreadyFinished(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} 
+ +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("finishOAuth2Session") + .summary("Finish an OAuth 2.0 session") + .description( + "Calling this endpoint will finish the OAuth 2.0 session, preventing any further use. If the session has a user associated with it, a job will be scheduled to sync the user's devices with the homeserver.", + ) + .tag("oauth2-session") + .response_with::<200, Json>, _>(|t| { + // Get the finished session sample + let [_, _, finished_session] = OAuth2Session::samples(); + let id = finished_session.id(); + let response = SingleResponse::new( + finished_session, + format!("/api/admin/v1/oauth2-sessions/{id}/finish"), + ); + t.description("OAuth 2.0 session was finished").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::AlreadyFinished(Ulid::nil())); + t.description("Session is already finished") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("OAuth 2.0 session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.oauth2_sessions.finish", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let session = repo + .oauth2_session() + .lookup(id) + .await? 
+ .ok_or(RouteError::NotFound(id))?; + + // Check if the session is already finished + if session.finished_at().is_some() { + return Err(RouteError::AlreadyFinished(id)); + } + + // If the session has a user associated with it, schedule a job to sync devices + if let Some(user_id) = session.user_id { + tracing::info!(user.id = %user_id, "Scheduling device sync job for user"); + let job = SyncDevicesJob::new_for_id(user_id); + repo.queue_job().schedule_job(&mut rng, &clock, job).await?; + } + + // Finish the session + let session = repo.oauth2_session().finish(&clock, session).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + OAuth2Session::from(session), + format!("/api/admin/v1/oauth2-sessions/{id}/finish"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::{AccessToken, Clock as _}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Get the session ID from the token we just created + let mut repo = state.repository().await.unwrap(); + let AccessToken { session_id, .. 
} = repo + .oauth2_access_token() + .find_by_token(&token) + .await + .unwrap() + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!("/api/admin/v1/oauth2-sessions/{session_id}/finish")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The finished_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["finished_at"], + serde_json::json!(state.clock.now()) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_already_finished_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + + // Create first admin token for the API call + let admin_token = state.token_with_scope("urn:mas:admin").await; + + // Create a second admin session that we'll finish + let second_admin_token = state.token_with_scope("urn:mas:admin").await; + + // Get the second session and finish it first + let mut repo = state.repository().await.unwrap(); + let AccessToken { session_id, .. 
} = repo + .oauth2_access_token() + .find_by_token(&second_admin_token) + .await + .unwrap() + .unwrap(); + + let session = repo + .oauth2_session() + .lookup(session_id) + .await + .unwrap() + .unwrap(); + + // Finish the session first + let session = repo + .oauth2_session() + .finish(&state.clock, session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Move the clock forward + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!( + "/api/admin/v1/oauth2-sessions/{}/finish", + session.id + )) + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + format!( + "OAuth 2.0 session with ID {} is already finished", + session.id + ) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_unknown_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = + Request::post("/api/admin/v1/oauth2-sessions/01040G2081040G2081040G2081/finish") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "OAuth 2.0 session with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/get.rs new file mode 100644 index 00000000..653bb69b --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/get.rs @@ -0,0 +1,154 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::OAuth2Session, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("OAuth 2.0 session ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, RouteError::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getOAuth2Session") + .summary("Get an OAuth 2.0 session") + .tag("oauth2-session") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = OAuth2Session::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("OAuth 2.0 session was found") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("OAuth 2.0 session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.oauth2_session.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let session = repo + .oauth2_session() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical(OAuth2Session::from( + session, + )))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::AccessToken; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // state.token_with_scope did create a session, so we can get it here + let mut repo = state.repository().await.unwrap(); + let AccessToken { session_id, .. } = repo + .oauth2_access_token() + .find_by_token(&token) + .await + .unwrap() + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::get(format!("/api/admin/v1/oauth2-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_eq!(body["data"]["type"], "oauth2-session"); + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "oauth2-session", + "id": "01FSHN9AG0MKGTBNZ16RDR3PVY", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "finished_at": null, + "user_id": null, + "user_session_id": null, + "client_id": "01FSHN9AG0FAQ50MT1E9FFRPZR", + "scope": "urn:mas:admin", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01FSHN9AG0MKGTBNZ16RDR3PVY" + } + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01FSHN9AG0MKGTBNZ16RDR3PVY" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: 
PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let session_id = Ulid::nil(); + let request = Request::get(format!("/api/admin/v1/oauth2-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/list.rs new file mode 100644 index 00000000..37f6ed37 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/list.rs @@ -0,0 +1,454 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::str::FromStr; + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, oauth2::OAuth2SessionFilter}; +use oauth2_types::scope::{Scope, ScopeToken}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{OAuth2Session, Resource}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum OAuth2SessionStatus { + Active, + Finished, +} + +impl std::fmt::Display for OAuth2SessionStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Active => write!(f, "active"), + Self::Finished => write!(f, 
"finished"), + } + } +} + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum OAuth2ClientKind { + Dynamic, + Static, +} + +impl std::fmt::Display for OAuth2ClientKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Dynamic => write!(f, "dynamic"), + Self::Static => write!(f, "static"), + } + } +} + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "OAuth2SessionFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve the items for the given user + #[serde(rename = "filter[user]")] + #[schemars(with = "Option")] + user: Option, + + /// Retrieve the items for the given client + #[serde(rename = "filter[client]")] + #[schemars(with = "Option")] + client: Option, + + /// Retrieve the items only for a specific client kind + #[serde(rename = "filter[client-kind]")] + client_kind: Option, + + /// Retrieve the items started from the given browser session + #[serde(rename = "filter[user-session]")] + #[schemars(with = "Option")] + user_session: Option, + + /// Retrieve the items with the given scope + #[serde(default, rename = "filter[scope]")] + scope: Vec, + + /// Retrieve the items with the given status + /// + /// Defaults to retrieve all sessions, including finished ones. 
+ /// + /// * `active`: Only retrieve active sessions + /// + /// * `finished`: Only retrieve finished sessions + #[serde(rename = "filter[status]")] + status: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(user) = self.user { + write!(f, "{sep}filter[user]={user}")?; + sep = '&'; + } + + if let Some(client) = self.client { + write!(f, "{sep}filter[client]={client}")?; + sep = '&'; + } + + if let Some(client_kind) = self.client_kind { + write!(f, "{sep}filter[client-kind]={client_kind}")?; + sep = '&'; + } + + if let Some(user_session) = self.user_session { + write!(f, "{sep}filter[user-session]={user_session}")?; + sep = '&'; + } + + for scope in &self.scope { + write!(f, "{sep}filter[scope]={scope}")?; + sep = '&'; + } + + if let Some(status) = self.status { + write!(f, "{sep}filter[status]={status}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Client ID {0} not found")] + ClientNotFound(Ulid), + + #[error("User session ID {0} not found")] + UserSessionNotFound(Ulid), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), + + #[error("Invalid scope {0:?} in filter parameters")] + InvalidScope(String), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, RouteError::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) | Self::ClientNotFound(_) | Self::UserSessionNotFound(_) => { + StatusCode::NOT_FOUND + } + Self::InvalidScope(_) | 
Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listOAuth2Sessions") + .summary("List OAuth 2.0 sessions") + .description("Retrieve a list of OAuth 2.0 sessions. +Note that by default, all sessions, including finished ones are returned, with the oldest first. +Use the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.") + .tag("oauth2-session") + .response_with::<200, Json>, _>(|t| { + let sessions = OAuth2Session::samples(); + let pagination = mas_storage::Pagination::first(sessions.len()); + let page = Page { + edges: sessions + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of OAuth 2.0 sessions") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + OAuth2Session::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::InvalidScope("not a valid scope".to_owned())); + t.description("Invalid scope").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.oauth2_sessions.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = OAuth2Session::PATH); + let base = include_count.add_to_base(&base); + let filter = OAuth2SessionFilter::default(); + + // Load the user from the filter + let user = if let Some(user_id) = params.user { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UserNotFound(user_id))?; + + Some(user) + } else { + None + }; + + let filter = match &user { + Some(user) => filter.for_user(user), + None => filter, + }; + + let client = if let Some(client_id) = params.client { + let client = repo + .oauth2_client() + .lookup(client_id) + .await? + .ok_or(RouteError::ClientNotFound(client_id))?; + + Some(client) + } else { + None + }; + + let filter = match &client { + Some(client) => filter.for_client(client), + None => filter, + }; + + let filter = match params.client_kind { + Some(OAuth2ClientKind::Dynamic) => filter.only_dynamic_clients(), + Some(OAuth2ClientKind::Static) => filter.only_static_clients(), + None => filter, + }; + + let user_session = if let Some(user_session_id) = params.user_session { + let user_session = repo + .browser_session() + .lookup(user_session_id) + .await? 
+ .ok_or(RouteError::UserSessionNotFound(user_session_id))?; + + Some(user_session) + } else { + None + }; + + let filter = match &user_session { + Some(user_session) => filter.for_browser_session(user_session), + None => filter, + }; + + let scope: Scope = params + .scope + .into_iter() + .map(|s| ScopeToken::from_str(&s).map_err(|_| RouteError::InvalidScope(s))) + .collect::>()?; + + let filter = if scope.is_empty() { + filter + } else { + filter.with_scope(&scope) + }; + + let filter = match params.status { + Some(OAuth2SessionStatus::Active) => filter.active_only(), + Some(OAuth2SessionStatus::Finished) => filter.finished_only(), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .oauth2_session() + .list(filter, pagination) + .await? + .map(OAuth2Session::from); + let count = repo.oauth2_session().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .oauth2_session() + .list(filter, pagination) + .await? 
+ .map(OAuth2Session::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.oauth2_session().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_oauth2_simple_session_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // We already have a session because of the token above + let request = Request::get("/api/admin/v1/oauth2-sessions") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "oauth2-session", + "id": "01FSHN9AG0MKGTBNZ16RDR3PVY", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "finished_at": null, + "user_id": null, + "user_session_id": null, + "client_id": "01FSHN9AG0FAQ50MT1E9FFRPZR", + "scope": "urn:mas:admin", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01FSHN9AG0MKGTBNZ16RDR3PVY" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MKGTBNZ16RDR3PVY" + } + } + } + ], + "links": { + "self": "/api/admin/v1/oauth2-sessions?page[first]=10", + "first": "/api/admin/v1/oauth2-sessions?page[first]=10", + "last": "/api/admin/v1/oauth2-sessions?page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/oauth2-sessions?count=false") + .bearer(&token) + .empty(); + let response = state.request(request).await; + 
response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "oauth2-session", + "id": "01FSHN9AG0MKGTBNZ16RDR3PVY", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "finished_at": null, + "user_id": null, + "user_session_id": null, + "client_id": "01FSHN9AG0FAQ50MT1E9FFRPZR", + "scope": "urn:mas:admin", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01FSHN9AG0MKGTBNZ16RDR3PVY" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MKGTBNZ16RDR3PVY" + } + } + } + ], + "links": { + "self": "/api/admin/v1/oauth2-sessions?count=false&page[first]=10", + "first": "/api/admin/v1/oauth2-sessions?count=false&page[first]=10", + "last": "/api/admin/v1/oauth2-sessions?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/oauth2-sessions?count=only") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions?count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/mod.rs new file mode 100644 index 00000000..5ac2e049 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/oauth2_sessions/mod.rs @@ -0,0 +1,15 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod finish; +mod get; +mod list; + +pub use self::{ + finish::{doc as finish_doc, handler as finish}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/add.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/add.rs new file mode 100644 index 00000000..2cfe1fb8 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/add.rs @@ -0,0 +1,311 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::Arc; + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use anyhow::Context; +use axum::{Json, extract::State, response::IntoResponse}; +use chrono::Duration; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxRng, Device, TokenType}; +use mas_matrix::HomeserverConnection; +use oauth2_types::scope::Scope; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{InconsistentPersonalSession, PersonalSession}, + response::{ErrorResponse, SingleResponse}, + v1::personal_sessions::personal_session_owner_from_caller, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User not found")] + UserNotFound, + + #[error("User is not active")] + UserDeactivated, + + #[error("Invalid scope")] + InvalidScope, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(InconsistentPersonalSession); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let 
sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound => StatusCode::NOT_FOUND, + Self::UserDeactivated => StatusCode::GONE, + Self::InvalidScope => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/personal-sessions` endpoint +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "CreatePersonalSessionRequest")] +pub struct Request { + /// The user this session will act on behalf of + #[schemars(with = "crate::admin::schema::Ulid")] + actor_user_id: Ulid, + + /// Human-readable name for the session + human_name: String, + + /// `OAuth2` scopes for this session + scope: String, + + /// Token expiry time in seconds. + /// If not set, the token won't expire. + expires_in: Option, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("createPersonalSession") + .summary("Create a new personal session with personal access token") + .tag("personal-session") + .response_with::<201, Json>, _>(|t| { + t.description("Personal session and personal access token were created") + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::InvalidScope); + t.description("Invalid scope provided").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.personal_sessions.add", skip_all)] +pub async fn handler( + CallContext { + mut repo, + clock, + session, + .. 
+ }: CallContext, + NoApi(mut rng): NoApi, + NoApi(State(homeserver)): NoApi>>, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + let owner = personal_session_owner_from_caller(&session); + + let actor_user = repo + .user() + .lookup(params.actor_user_id) + .await? + .ok_or(RouteError::UserNotFound)?; + + if !actor_user.is_valid_actor() { + return Err(RouteError::UserDeactivated); + } + + let scope: Scope = params.scope.parse().map_err(|_| RouteError::InvalidScope)?; + + // Create the personal session + let session = repo + .personal_session() + .add( + &mut rng, + &clock, + owner, + &actor_user, + params.human_name, + scope, + ) + .await?; + + // Create the initial token for the session + let access_token_string = TokenType::PersonalAccessToken.generate(&mut rng); + let access_token = repo + .personal_access_token() + .add( + &mut rng, + &clock, + &session, + &access_token_string, + params + .expires_in + .map(|exp_in| Duration::seconds(i64::from(exp_in))), + ) + .await?; + + // If the session has a device, we should add those to the homeserver now + if session.has_device() { + // Lock the user sync to make sure we don't get into a race condition + repo.user().acquire_lock_for_sync(&actor_user).await?; + + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + // NOTE: We haven't relinquished the repo at this point, + // so we are holding a transaction across the homeserver + // operation. + // This is suboptimal, but simpler. + // Given this is an administrative endpoint, this is a tolerable + // compromise for now. + homeserver + .upsert_device(&actor_user.username, device.as_str(), None) + .await + .context("Failed to provision device") + .map_err(|e| RouteError::Internal(e.into()))?; + } + } + } + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical( + PersonalSession::try_from((session, Some(access_token)))? 
+ .with_token(access_token_string), + )), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use serde_json::Value; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create_personal_session_with_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request_body = serde_json::json!({ + "actor_user_id": user.id, + "human_name": "Test Session", + "scope": "openid urn:mas:admin", + "expires_in": 3600 + }); + + let request = Request::post("/api/admin/v1/personal-sessions") + .bearer(&token) + .json(&request_body); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "personal-session", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "revoked_at": null, + "owner_user_id": null, + "owner_client_id": "01FSHN9AG0FAQ50MT1E9FFRPZR", + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "Test Session", + "scope": "openid urn:mas:admin", + "last_active_at": null, + "last_active_ip": null, + "expires_at": "2022-01-16T15:40:00Z", + "access_token": "mpt_FM44zJN5qePGMLvvMXC4Ds1A3lCWc6_bJ9Wj1" + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG07HNEZXNQM2KNBNF6" + } + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async 
fn test_create_personal_session_invalid_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request_body = serde_json::json!({ + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "scope": "openid", + "human_name": "Test Session", + "expires_in": 3600 + }); + + let request = Request::post("/api/admin/v1/personal-sessions") + .bearer(&token) + .json(&request_body); + + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create_personal_session_invalid_scope(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request_body = serde_json::json!({ + "actor_user_id": user.id, + "human_name": "Test Session", + "scope": "invalid\nscope", + "expires_in": 3600 + }); + + let request = Request::post("/api/admin/v1/personal-sessions") + .bearer(&token) + .json(&request_body); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/get.rs new file mode 100644 index 00000000..c0c0378f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/get.rs @@ -0,0 +1,189 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; + +use crate::{ + admin::{ + call_context::CallContext, + model::{InconsistentPersonalSession, PersonalSession}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Personal session not found")] + NotFound, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(InconsistentPersonalSession); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getPersonalSession") + .summary("Get a personal session") + .tag("personal-session") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = PersonalSession::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Personal session details").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound); + t.description("Personal session not found") + .example(response) + }) +} + +#[tracing::instrument( + name = "handler.admin.v1.personal_sessions.get", + skip_all, + fields(personal_session.id = %*id), +)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let session_id = *id; + + let session = repo + .personal_session() + .lookup(session_id) + .await? + .ok_or(RouteError::NotFound)?; + + let token = if session.is_revoked() { + None + } else { + repo.personal_access_token() + .find_active_for_session(&session) + .await? + }; + + Ok(Json(SingleResponse::new_canonical( + PersonalSession::try_from((session, token))?, + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use mas_data_model::personal::session::PersonalSessionOwner; + use oauth2_types::scope::{OPENID, Scope}; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user and personal session for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Test session".to_owned(), + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + repo.personal_access_token() + .add(&mut rng, &state.clock, &personal_session, "mpt_hiss", None) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get(format!( + "/api/admin/v1/personal-sessions/{}", + personal_session.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_eq!(body["data"]["id"], personal_session.id.to_string()); + assert_json_snapshot!(body, @r#" + { + "data": { + 
"type": "personal-session", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "Test session", + "scope": "openid", + "last_active_at": null, + "last_active_ip": null, + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let session_id = Ulid::nil(); + let request = Request::get(format!("/api/admin/v1/personal-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/list.rs new file mode 100644 index 00000000..c9d3d55d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/list.rs @@ -0,0 +1,585 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::str::FromStr as _; + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use chrono::{DateTime, Utc}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::personal::PersonalSessionFilter; +use oauth2_types::scope::{Scope, ScopeToken}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{InconsistentPersonalSession, PersonalSession, Resource}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum PersonalSessionStatus { + Active, + Revoked, +} + +impl std::fmt::Display for PersonalSessionStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Active => write!(f, "active"), + Self::Revoked => write!(f, "revoked"), + } + } +} + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "PersonalSessionFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Filter by owner user ID + #[serde(rename = "filter[owner_user]")] + #[schemars(with = "Option")] + owner_user: Option, + + /// Filter by owner `OAuth2` client ID + #[serde(rename = "filter[owner_client]")] + #[schemars(with = "Option")] + owner_client: Option, + + /// Filter by actor user ID + #[serde(rename = "filter[actor_user]")] + #[schemars(with = "Option")] + actor_user: Option, + + /// Retrieve the items with the given scope + #[serde(default, rename = "filter[scope]")] + scope: Vec, + + /// Filter by session status + #[serde(rename = "filter[status]")] + status: Option, + + /// Filter by access token expiry date + #[serde(rename = "filter[expires_before]")] 
+ expires_before: Option>, + + /// Filter by access token expiry date + #[serde(rename = "filter[expires_after]")] + expires_after: Option>, + + /// Filter by whether the access token has an expiry time + #[serde(rename = "filter[expires]")] + expires: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(owner_user) = self.owner_user { + write!(f, "{sep}filter[owner_user]={owner_user}")?; + sep = '&'; + } + if let Some(owner_client) = self.owner_client { + write!(f, "{sep}filter[owner_client]={owner_client}")?; + sep = '&'; + } + if let Some(actor_user) = self.actor_user { + write!(f, "{sep}filter[actor_user]={actor_user}")?; + sep = '&'; + } + for scope in &self.scope { + write!(f, "{sep}filter[scope]={scope}")?; + sep = '&'; + } + if let Some(status) = self.status { + write!(f, "{sep}filter[status]={status}")?; + sep = '&'; + } + if let Some(expires_before) = self.expires_before { + write!( + f, + "{sep}filter[expires_before]={}", + expires_before.format("%Y-%m-%dT%H:%M:%SZ") + )?; + sep = '&'; + } + if let Some(expires_after) = self.expires_after { + write!( + f, + "{sep}filter[expires_after]={}", + expires_after.format("%Y-%m-%dT%H:%M:%SZ") + )?; + sep = '&'; + } + if let Some(expires) = self.expires { + write!(f, "{sep}filter[expires]={expires}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Client ID {0} not found")] + ClientNotFound(Ulid), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), + + #[error("Invalid scope {0:?} in filter parameters")] + InvalidScope(String), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(InconsistentPersonalSession); + +impl 
IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) | Self::ClientNotFound(_) => StatusCode::NOT_FOUND, + Self::InvalidScope(_) | Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listPersonalSessions") + .summary("List personal sessions") + .description("Retrieve a list of personal sessions. +Note that by default, all sessions, including revoked ones are returned, with the oldest first. +Use the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.") + .tag("personal-session") + .response_with::<200, Json>, _>(|t| { + let sessions = PersonalSession::samples(); + let pagination = mas_storage::Pagination::first(sessions.len()); + let page = mas_storage::Page { + edges: sessions + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of personal sessions") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(3), + PersonalSession::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::ClientNotFound(Ulid::nil())); + t.description("Client was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.personal_sessions.list", skip_all)] +pub async fn 
handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = PersonalSession::PATH); + let base = include_count.add_to_base(&base); + + let filter = PersonalSessionFilter::new(); + + let owner_user = if let Some(owner_user_id) = params.owner_user { + let owner_user = repo + .user() + .lookup(owner_user_id) + .await? + .ok_or(RouteError::UserNotFound(owner_user_id))?; + Some(owner_user) + } else { + None + }; + + let filter = match &owner_user { + Some(user) => filter.for_owner_user(user), + None => filter, + }; + + let owner_client = if let Some(owner_client_id) = params.owner_client { + let owner_client = repo + .oauth2_client() + .lookup(owner_client_id) + .await? + .ok_or(RouteError::ClientNotFound(owner_client_id))?; + Some(owner_client) + } else { + None + }; + + let filter = match &owner_client { + Some(client) => filter.for_owner_oauth2_client(client), + None => filter, + }; + + let actor_user = if let Some(actor_user_id) = params.actor_user { + let user = repo + .user() + .lookup(actor_user_id) + .await? 
+ .ok_or(RouteError::UserNotFound(actor_user_id))?; + Some(user) + } else { + None + }; + + let filter = match &actor_user { + Some(user) => filter.for_actor_user(user), + None => filter, + }; + + let scope: Scope = params + .scope + .into_iter() + .map(|s| ScopeToken::from_str(&s).map_err(|_| RouteError::InvalidScope(s))) + .collect::>()?; + + let filter = if scope.is_empty() { + filter + } else { + filter.with_scope(&scope) + }; + + let filter = match params.status { + Some(PersonalSessionStatus::Active) => filter.active_only(), + Some(PersonalSessionStatus::Revoked) => filter.finished_only(), + None => filter, + }; + + let filter = if let Some(expires_after) = params.expires_after { + filter.with_expires_after(expires_after) + } else { + filter + }; + + let filter = if let Some(expires_before) = params.expires_before { + filter.with_expires_before(expires_before) + } else { + filter + }; + + let filter = if let Some(expires) = params.expires { + filter.with_expires(expires) + } else { + filter + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo.personal_session().list(filter, pagination).await?; + let count = repo.personal_session().count(filter).await?; + PaginatedResponse::for_page( + page.try_map(PersonalSession::try_from)?, + pagination, + Some(count), + &base, + ) + } + IncludeCount::False => { + let page = repo.personal_session().list(filter, pagination).await?; + PaginatedResponse::for_page( + page.try_map(PersonalSession::try_from)?, + pagination, + None, + &base, + ) + } + IncludeCount::Only => { + let count = repo.personal_session().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeSet; + + use chrono::Duration; + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use mas_data_model::personal::session::PersonalSessionOwner; + use oauth2_types::scope::{OPENID, Scope}; + use 
sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + + // Create a user and personal session for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Test session".to_owned(), + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + repo.personal_access_token() + .add( + &mut rng, + &state.clock, + &personal_session, + "mpt_hiss", + Some(Duration::days(42)), + ) + .await + .unwrap(); + + state.clock.advance(Duration::days(1)); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Another test session".to_owned(), + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + repo.personal_access_token() + .add( + &mut rng, + &state.clock, + &personal_session, + "mpt_scratch", + Some(Duration::days(21)), + ) + .await + .unwrap(); + repo.personal_session() + .revoke(&state.clock, personal_session) + .await + .unwrap(); + + state.clock.advance(Duration::days(1)); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Another test session".to_owned(), + Scope::from_iter([OPENID, "urn:mas:admin".parse().unwrap()]), + ) + .await + .unwrap(); + repo.personal_access_token() + .add( + &mut rng, + &state.clock, + &personal_session, + "mpt_meow", + Some(Duration::days(14)), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let token = state.token_with_scope("urn:mas:admin").await; + let request = 
Request::get("/api/admin/v1/personal-sessions") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "personal-session", + "id": "01FSHN9AG0YQYAR04VCYTHJ8SK", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG09FE39KETP6F390F8", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG09FE39KETP6F390F8", + "human_name": "Test session", + "scope": "openid", + "last_active_at": null, + "last_active_ip": null, + "expires_at": "2022-02-27T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0YQYAR04VCYTHJ8SK" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0YQYAR04VCYTHJ8SK" + } + } + }, + { + "type": "personal-session", + "id": "01FSM7P1G0VBGAMK9D9QMGQ5MY", + "attributes": { + "created_at": "2022-01-17T14:40:00Z", + "revoked_at": "2022-01-17T14:40:00Z", + "owner_user_id": "01FSHN9AG09FE39KETP6F390F8", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG09FE39KETP6F390F8", + "human_name": "Another test session", + "scope": "openid", + "last_active_at": null, + "last_active_ip": null, + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSM7P1G0VBGAMK9D9QMGQ5MY" + }, + "meta": { + "page": { + "cursor": "01FSM7P1G0VBGAMK9D9QMGQ5MY" + } + } + }, + { + "type": "personal-session", + "id": "01FSPT2RG08Y11Y5BM4VZ4CN8K", + "attributes": { + "created_at": "2022-01-18T14:40:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG09FE39KETP6F390F8", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG09FE39KETP6F390F8", + "human_name": "Another test session", + "scope": "openid urn:mas:admin", + "last_active_at": null, + "last_active_ip": null, + "expires_at": "2022-02-01T14:40:00Z" + }, + "links": { + "self": 
"/api/admin/v1/personal-sessions/01FSPT2RG08Y11Y5BM4VZ4CN8K" + }, + "meta": { + "page": { + "cursor": "01FSPT2RG08Y11Y5BM4VZ4CN8K" + } + } + } + ], + "links": { + "self": "/api/admin/v1/personal-sessions?page[first]=10", + "first": "/api/admin/v1/personal-sessions?page[first]=10", + "last": "/api/admin/v1/personal-sessions?page[last]=10" + } + } + "#); + + // Map of filters to their expected set of returned ULIDs + let filters_and_expected: &[(&str, &[&str])] = &[ + ( + "filter[expires_before]=2022-02-15T00:00:00Z", + &["01FSPT2RG08Y11Y5BM4VZ4CN8K"], + ), + ( + "filter[expires_after]=2022-02-15T00:00:00Z", + &["01FSHN9AG0YQYAR04VCYTHJ8SK"], + ), + ( + "filter[status]=active", + &["01FSHN9AG0YQYAR04VCYTHJ8SK", "01FSPT2RG08Y11Y5BM4VZ4CN8K"], + ), + ("filter[status]=revoked", &["01FSM7P1G0VBGAMK9D9QMGQ5MY"]), + ( + "filter[expires]=true", + &["01FSHN9AG0YQYAR04VCYTHJ8SK", "01FSPT2RG08Y11Y5BM4VZ4CN8K"], + ), + ("filter[expires]=false", &["01FSM7P1G0VBGAMK9D9QMGQ5MY"]), + ( + "filter[scope]=urn:mas:admin", + &["01FSPT2RG08Y11Y5BM4VZ4CN8K"], + ), + ]; + + for (filter, expected_ids) in filters_and_expected { + let request = Request::get(format!("/api/admin/v1/personal-sessions?{filter}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + let found: BTreeSet<&str> = body["data"] + .as_array() + .unwrap() + .iter() + .map(|item| item["id"].as_str().unwrap()) + .collect(); + let expected: BTreeSet<&str> = expected_ids.iter().copied().collect(); + + assert_eq!( + found, expected, + "filter {filter} did not produce expected results" + ); + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/mod.rs new file mode 100644 index 00000000..37c591b0 --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/mod.rs @@ -0,0 +1,39 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod add; +mod get; +mod list; +mod regenerate; +mod revoke; + +use mas_data_model::personal::session::PersonalSessionOwner; + +pub use self::{ + add::{doc as add_doc, handler as add}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, + regenerate::{doc as regenerate_doc, handler as regenerate}, + revoke::{doc as revoke_doc, handler as revoke}, +}; +use crate::admin::call_context::CallerSession; + +/// Given the [`CallerSession`] of a caller of the Admin API, +/// return the [`PersonalSessionOwner`] that should own created personal +/// sessions. +fn personal_session_owner_from_caller(caller: &CallerSession) -> PersonalSessionOwner { + match caller { + CallerSession::OAuth2Session(session) => { + if let Some(user_id) = session.user_id { + PersonalSessionOwner::User(user_id) + } else { + PersonalSessionOwner::OAuth2Client(session.client_id) + } + } + CallerSession::PersonalSession(session) => { + PersonalSessionOwner::User(session.actor_user_id) + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/regenerate.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/regenerate.rs new file mode 100644 index 00000000..e6c70679 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/regenerate.rs @@ -0,0 +1,246 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use chrono::Duration; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxRng, TokenType}; +use schemars::JsonSchema; +use serde::Deserialize; +use tracing::error; + +use crate::{ + admin::{ + call_context::CallContext, + model::{InconsistentPersonalSession, PersonalSession}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + v1::personal_sessions::personal_session_owner_from_caller, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User not found")] + UserNotFound, + + #[error("Session not found")] + SessionNotFound, + + #[error("Session not valid")] + SessionNotValid, + + #[error("Session does not belong to you")] + SessionNotYours, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(InconsistentPersonalSession); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound | Self::SessionNotFound => StatusCode::NOT_FOUND, + Self::SessionNotValid => StatusCode::UNPROCESSABLE_ENTITY, + Self::SessionNotYours => StatusCode::FORBIDDEN, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/personal-sessions/{id}/regenerate` endpoint +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "RegeneratePersonalSessionRequest")] +pub struct Request { + /// Token expiry time in seconds. + /// If not set, the token won't expire. 
+ expires_in: Option, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("regeneratePersonalSession") + .summary("Regenerate a personal session by replacing its personal access token") + .tag("personal-session") + .response_with::<201, Json>, _>(|t| { + t.description( + "Personal session was regenerated and a personal access token was created", + ) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::SessionNotFound); + t.description("Personal session was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.personal_sessions.regenerate", skip_all)] +pub async fn handler( + CallContext { + mut repo, + clock, + session: caller_session, + .. + }: CallContext, + NoApi(mut rng): NoApi, + id: UlidPathParam, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + let session_id = *id; + + let session = repo + .personal_session() + .lookup(session_id) + .await? + .ok_or(RouteError::SessionNotFound)?; + + if !session.is_valid() { + // We don't revive revoked sessions through regeneration + return Err(RouteError::SessionNotValid); + } + + // If the owner is not the current caller, then currently we reject the + // regeneration. + let caller = personal_session_owner_from_caller(&caller_session); + if session.owner != caller { + return Err(RouteError::SessionNotYours); + } + + // Revoke the existing active token for the session.
+ let old_token_opt = repo + .personal_access_token() + .find_active_for_session(&session) + .await?; + let Some(old_token) = old_token_opt else { + // This shouldn't happen + error!("session is supposedly valid but had no access token"); + return Err(RouteError::SessionNotValid); + }; + + repo.personal_access_token() + .revoke(&clock, old_token) + .await?; + + // Create the regenerated token for the session + let access_token_string = TokenType::PersonalAccessToken.generate(&mut rng); + let access_token = repo + .personal_access_token() + .add( + &mut rng, + &clock, + &session, + &access_token_string, + params + .expires_in + .map(|exp_in| Duration::seconds(i64::from(exp_in))), + ) + .await?; + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical( + PersonalSession::try_from((session, Some(access_token)))? + .with_token(access_token_string), + )), + )) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use serde_json::{Value, json}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_regenerate_personal_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/personal-sessions") + .bearer(&token) + .json(json!({ + "actor_user_id": user.id, + "human_name": "SuperDuperAdminCLITool Token", + "scope": "openid urn:mas:admin", + "expires_in": 3600 + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + 
let created: Value = response.json(); + + let session_id = created["data"]["id"].as_str().unwrap(); + + state.clock.advance(Duration::minutes(3)); + + let request = Request::post(format!( + "/api/admin/v1/personal-sessions/{session_id}/regenerate" + )) + .bearer(&token) + .json(json!({ + "expires_in": 86400 + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "personal-session", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "revoked_at": null, + "owner_user_id": null, + "owner_client_id": "01FSHN9AG0FAQ50MT1E9FFRPZR", + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "SuperDuperAdminCLITool Token", + "scope": "openid urn:mas:admin", + "last_active_at": null, + "last_active_ip": null, + "expires_at": "2022-01-17T14:43:00Z", + "access_token": "mpt_6cq7FqNSYoosbXl3bbpfh9yNy9NzuR_0vOV2O" + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG07HNEZXNQM2KNBNF6" + } + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/revoke.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/revoke.rs new file mode 100644 index 00000000..10fd6650 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/personal_sessions/revoke.rs @@ -0,0 +1,250 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::queue::{QueueJobRepositoryExt as _, SyncDevicesJob}; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{InconsistentPersonalSession, PersonalSession}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Personal session with ID {0} not found")] + NotFound(Ulid), + + #[error("Personal session with ID {0} is already revoked")] + AlreadyRevoked(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(InconsistentPersonalSession); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::AlreadyRevoked(_) => StatusCode::CONFLICT, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("revokePersonalSession") + .summary("Revoke a personal session") + .tag("personal-session") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= PersonalSession::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Personal session was revoked") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Personal session not found") + .example(response) + }) + .response_with::<409, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::AlreadyRevoked(Ulid::nil())); + t.description("Personal session already revoked") + .example(response) + }) +} + +#[tracing::instrument( + name = "handler.admin.v1.personal_sessions.revoke", + skip_all, + fields(personal_session.id = %*session_id), +)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + session_id: UlidPathParam, +) -> Result>, RouteError> { + let session_id = *session_id; + let session = repo + .personal_session() + .lookup(session_id) + .await? + .ok_or(RouteError::NotFound(session_id))?; + + if session.is_revoked() { + return Err(RouteError::AlreadyRevoked(session_id)); + } + + let session = repo.personal_session().revoke(&clock, session).await?; + + if session.has_device() { + // If the session has a device, then we are now + // deleting a device and should schedule a device sync to clean up. 
+ repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SyncDevicesJob::new_for_id(session.actor_user_id), + ) + .await?; + } + + repo.save().await?; + + Ok(Json(SingleResponse::new_canonical( + PersonalSession::try_from((session, None))?, + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::{Clock, personal::session::PersonalSessionOwner}; + use oauth2_types::scope::Scope; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user and personal session for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Test session".to_owned(), + Scope::from_iter([]), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post(format!( + "/api/admin/v1/personal-sessions/{}/revoke", + personal_session.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The revoked_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["revoked_at"], + serde_json::json!(Clock::now(&state.clock)) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_already_revoked_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // 
Create a user and personal session for testing + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let personal_session = repo + .personal_session() + .add( + &mut rng, + &state.clock, + PersonalSessionOwner::from(&user), + &user, + "Test session".to_owned(), + Scope::from_iter([]), + ) + .await + .unwrap(); + + // Revoke the session first + let session = repo + .personal_session() + .revoke(&state.clock, personal_session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Move the clock forward + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!( + "/api/admin/v1/personal-sessions/{}/revoke", + session.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::CONFLICT); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + format!("Personal session with ID {} is already revoked", session.id) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_unknown_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = + Request::post("/api/admin/v1/personal-sessions/01040G2081040G2081040G2081/revoke") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "Personal session with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get.rs new file mode 100644 index 00000000..1ba0517f --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get.rs @@ -0,0 +1,156 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::PolicyData, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Policy data with ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getPolicyData") + .summary("Get policy data by ID") + .tag("policy-data") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= PolicyData::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Policy data was found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Policy data was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.policy_data.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let policy_data = repo + .policy_data() + .get() + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical(policy_data.into()))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let policy_data = repo + .policy_data() + .set( + &mut rng, + &state.clock, + serde_json::json!({"hello": "world"}), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get(format!("/api/admin/v1/policy-data/{}", policy_data.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "policy-data", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "data": { + "hello": "world" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": 
"/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::get(format!("/api/admin/v1/policy-data/{}", Ulid::nil())) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "Policy data with ID 00000000000000000000000000 not found" + } + ] + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get_latest.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get_latest.rs new file mode 100644 index 00000000..102c578f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/get_latest.rs @@ -0,0 +1,152 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; + +use crate::{ + admin::{ + call_context::CallContext, + model::PolicyData, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("No policy data found")] + NotFound, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getLatestPolicyData") + .summary("Get the latest policy data") + .tag("policy-data") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = PolicyData::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Latest policy data was found") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound); + t.description("No policy data was found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.policy_data.get_latest", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, +) -> Result>, RouteError> { + let policy_data = repo + .policy_data() + .get() + .await? 
+ .ok_or(RouteError::NotFound)?; + + Ok(Json(SingleResponse::new_canonical(policy_data.into()))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_latest(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + repo.policy_data() + .set( + &mut rng, + &state.clock, + serde_json::json!({"hello": "world"}), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get("/api/admin/v1/policy-data/latest") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "policy-data", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "data": { + "hello": "world" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_no_latest(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::get("/api/admin/v1/policy-data/latest") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "No policy data found" + } + ] + } + "###); 
+ } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/mod.rs new file mode 100644 index 00000000..f8952e71 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/mod.rs @@ -0,0 +1,14 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod get; +mod get_latest; +mod set; + +pub use self::{ + get::{doc as get_doc, handler as get}, + get_latest::{doc as get_latest_doc, handler as get_latest}, + set::{doc as set_doc, handler as set}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/set.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/set.rs new file mode 100644 index 00000000..5bee6141 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/policy_data/set.rs @@ -0,0 +1,155 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, extract::State, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_policy::PolicyFactory; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::PolicyData, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error("Failed to instanciate policy with the provided data")] + InvalidPolicyData(#[from] mas_policy::LoadError), + + #[error(transparent)] + Internal(Box), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + RouteError::InvalidPolicyData(_) => StatusCode::BAD_REQUEST, + RouteError::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +fn data_example() -> serde_json::Value { + serde_json::json!({ + "hello": "world", + "foo": 42, + "bar": true + }) +} + +/// # JSON payload for the `POST /api/admin/v1/policy-data` +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "SetPolicyDataRequest")] +pub struct SetPolicyDataRequest { + #[schemars(example = data_example())] + pub data: serde_json::Value, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("setPolicyData") + .summary("Set the current policy data") + .tag("policy-data") + .response_with::<201, Json>, _>(|t| { + let [sample, ..] 
= PolicyData::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Policy data was successfully set") + .example(response) + }) + .response_with::<400, Json, _>(|t| { + let error = ErrorResponse::from_error(&RouteError::InvalidPolicyData( + mas_policy::LoadError::invalid_data_example(), + )); + t.description("Invalid policy data").example(error) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.policy_data.set", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + State(policy_factory): State>, + Json(request): Json, +) -> Result<(StatusCode, Json>), RouteError> { + let policy_data = repo + .policy_data() + .set(&mut rng, &clock, request.data) + .await?; + + // Swap the policy data. This will fail if the policy data is invalid + policy_factory.set_dynamic_data(policy_data.clone()).await?; + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical(policy_data.into())), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/policy-data") + .bearer(&token) + .json(serde_json::json!({ + "data": { + "hello": "world" + } + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "policy-data", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "data": { + "hello": "world" + } + }, + "links": { + "self": 
"/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/site_config.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/site_config.rs new file mode 100644 index 00000000..40a5db51 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/site_config.rs @@ -0,0 +1,97 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::transform::TransformOperation; +use axum::{Json, extract::State}; +use schemars::JsonSchema; +use serde::Serialize; + +use crate::admin::call_context::CallContext; + +#[allow(clippy::struct_excessive_bools)] +#[derive(Serialize, JsonSchema)] +pub struct SiteConfig { + /// The Matrix server name for which this instance is configured + server_name: String, + + /// Whether password login is enabled. + pub password_login_enabled: bool, + + /// Whether password registration is enabled. + pub password_registration_enabled: bool, + + /// Whether a valid email address is required for password registrations. + pub password_registration_email_required: bool, + + /// Whether registration tokens are required for password registrations. + pub registration_token_required: bool, + + /// Whether users can change their email. + pub email_change_allowed: bool, + + /// Whether users can change their display name. + pub displayname_change_allowed: bool, + + /// Whether users can change their password. + pub password_change_allowed: bool, + + /// Whether users can recover their account via email. + pub account_recovery_allowed: bool, + + /// Whether users can delete their own account. + pub account_deactivation_allowed: bool, + + /// Whether CAPTCHA during registration is enabled. 
+ pub captcha_enabled: bool, + + /// Minimum password complexity, between 0 and 4. + /// This is a score from zxcvbn. + #[schemars(range(min = 0, max = 4))] + pub minimum_password_complexity: u8, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("siteConfig") + .tag("server") + .summary("Get informations about the configuration of this MAS instance") + .response_with::<200, Json, _>(|t| { + t.example(SiteConfig { + server_name: "example.com".to_owned(), + password_login_enabled: true, + password_registration_enabled: true, + password_registration_email_required: true, + registration_token_required: true, + email_change_allowed: true, + displayname_change_allowed: true, + password_change_allowed: true, + account_recovery_allowed: true, + account_deactivation_allowed: true, + captcha_enabled: true, + minimum_password_complexity: 3, + }) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.site_config", skip_all)] +pub async fn handler( + _: CallContext, + State(site_config): State, +) -> Json { + Json(SiteConfig { + server_name: site_config.server_name, + password_login_enabled: site_config.password_login_enabled, + password_registration_enabled: site_config.password_registration_enabled, + password_registration_email_required: site_config.password_registration_email_required, + registration_token_required: site_config.registration_token_required, + email_change_allowed: site_config.email_change_allowed, + displayname_change_allowed: site_config.displayname_change_allowed, + password_change_allowed: site_config.password_change_allowed, + account_recovery_allowed: site_config.account_recovery_allowed, + account_deactivation_allowed: site_config.account_deactivation_allowed, + captcha_enabled: site_config.captcha.is_some(), + minimum_password_complexity: site_config.minimum_password_complexity, + }) +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/add.rs 
b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/add.rs new file mode 100644 index 00000000..56821624 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/add.rs @@ -0,0 +1,465 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UpstreamOAuthLink}, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Upstream Oauth 2.0 Provider ID {0} with subject {1} is already linked to a user")] + LinkAlreadyExists(Ulid, String), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Upstream OAuth 2.0 Provider ID {0} not found")] + ProviderNotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::LinkAlreadyExists(_, _) => StatusCode::CONFLICT, + Self::UserNotFound(_) | Self::ProviderNotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/upstream-oauth-links` +#[derive(Deserialize, JsonSchema)] +#[serde(rename = 
"AddUpstreamOauthLinkRequest")] +pub struct Request { + /// The ID of the user to which the link should be added. + #[schemars(with = "crate::admin::schema::Ulid")] + user_id: Ulid, + + /// The ID of the upstream provider to which the link is for. + #[schemars(with = "crate::admin::schema::Ulid")] + provider_id: Ulid, + + /// The subject (sub) claim of the user on the provider. + subject: String, + + /// A human readable account name. + human_account_name: Option, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("addUpstreamOAuthLink") + .summary("Add an upstream OAuth 2.0 link") + .tag("upstream-oauth-link") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = UpstreamOAuthLink::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("An existing Upstream OAuth 2.0 link was associated to a user") + .example(response) + }) + .response_with::<201, Json>, _>(|t| { + let [sample, ..] = UpstreamOAuthLink::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("A new Upstream OAuth 2.0 link was created") + .example(response) + }) + .response_with::<409, RouteError, _>(|t| { + let [provider_sample, ..] = UpstreamOAuthLink::samples(); + let response = ErrorResponse::from_error(&RouteError::LinkAlreadyExists( + provider_sample.id(), + String::from("subject1"), + )); + t.description("The subject from the provider is already linked to another user") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User or provider was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_links.post", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. 
+ }: CallContext, + NoApi(mut rng): NoApi, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + // Find the user + let user = repo + .user() + .lookup(params.user_id) + .await? + .ok_or(RouteError::UserNotFound(params.user_id))?; + + // Find the provider + let provider = repo + .upstream_oauth_provider() + .lookup(params.provider_id) + .await? + .ok_or(RouteError::ProviderNotFound(params.provider_id))?; + + let maybe_link = repo + .upstream_oauth_link() + .find_by_subject(&provider, ¶ms.subject) + .await?; + if let Some(mut link) = maybe_link { + if link.user_id.is_some() { + return Err(RouteError::LinkAlreadyExists( + link.provider_id, + link.subject, + )); + } + + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await?; + link.user_id = Some(user.id); + + repo.save().await?; + + return Ok(( + StatusCode::OK, + Json(SingleResponse::new_canonical(link.into())), + )); + } + + let mut link = repo + .upstream_oauth_link() + .add( + &mut rng, + &clock, + &provider, + params.subject, + params.human_account_name, + ) + .await?; + + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await?; + link.user_id = Some(user.id); + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical(link.into())), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + use ulid::Ulid; + + use super::super::test_utils; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let provider = repo + 
.upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .json(serde_json::json!({ + "user_id": alice.id, + "provider_id": provider.id, + "subject": "subject1" + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "upstream-oauth-link", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG07HNEZXNQM2KNBNF6" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_association(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + + // Existing unfinished link + repo.upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + String::from("subject1"), + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .json(serde_json::json!({ + "user_id": alice.id, + "provider_id": 
provider.id, + "subject": "subject1" + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "upstream-oauth-link", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG09NMZYX8MFYH578R9" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG09NMZYX8MFYH578R9" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_already_exists(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let bob = repo + .user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + String::from("subject1"), + None, + ) + .await + .unwrap(); + + repo.upstream_oauth_link() + .associate_to_user(&link, &alice) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .json(serde_json::json!({ + "user_id": bob.id, + "provider_id": provider.id, + "subject": "subject1" + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CONFLICT); + let body: 
serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "Upstream Oauth 2.0 Provider ID 01FSHN9AG09NMZYX8MFYH578R9 with subject subject1 is already linked to a user" + } + ] + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_user_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .json(serde_json::json!({ + "user_id": Ulid::nil(), + "provider_id": provider.id, + "subject": "subject1" + })); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_provider_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .json(serde_json::json!({ + "user_id": alice.id, + "provider_id": Ulid::nil(), + "subject": "subject1" + })); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); 
+ let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "Upstream OAuth 2.0 Provider ID 00000000000000000000000000 not found" + } + ] + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs new file mode 100644 index 00000000..3e87109b --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs @@ -0,0 +1,180 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{call_context::CallContext, params::UlidPathParam, response::ErrorResponse}, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Upstream OAuth 2.0 Link ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("deleteUpstreamOAuthLink") + .summary("Delete an upstream OAuth 2.0 link") + .tag("upstream-oauth-link") + .response_with::<204, (), 
_>(|t| t.description("Upstream OAuth 2.0 link was deleted")) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Upstream OAuth 2.0 link was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_links.delete", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result { + let link = repo + .upstream_oauth_link() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + repo.upstream_oauth_link().remove(&clock, link).await?; + + repo.save().await?; + + Ok(StatusCode::NO_CONTENT) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::UpstreamOAuthAuthorizationSessionState; + use sqlx::PgPool; + use ulid::Ulid; + + use super::super::test_utils; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_delete(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + + // Pretend it was linked by an authorization session + let session = repo + .upstream_oauth_session() + .add(&mut rng, &state.clock, &provider, String::new(), None, None) + .await + .unwrap(); + + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + String::from("subject1"), + None, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .complete_with_link(&state.clock, 
session, &link, None, None, None, None) + .await + .unwrap(); + + repo.upstream_oauth_link() + .associate_to_user(&link, &alice) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::delete(format!("/api/admin/v1/upstream-oauth-links/{}", link.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NO_CONTENT); + + // Verify that the link was deleted + let request = Request::get(format!("/api/admin/v1/upstream-oauth-links/{}", link.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + + // Verify that the session was marked as unlinked + let mut repo = state.repository().await.unwrap(); + let session = repo + .upstream_oauth_session() + .lookup(session.id) + .await + .unwrap() + .unwrap(); + assert!(matches!( + session.state, + UpstreamOAuthAuthorizationSessionState::Unlinked { .. } + )); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let link_id = Ulid::nil(); + let request = Request::delete(format!("/api/admin/v1/upstream-oauth-links/{link_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/get.rs new file mode 100644 index 00000000..483d9089 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/get.rs @@ -0,0 +1,173 @@ +// Copyright 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::UpstreamOAuthLink, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Upstream OAuth 2.0 Link ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_entry_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_entry_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUpstreamOAuthLink") + .summary("Get an upstream OAuth 2.0 link") + .tag("upstream-oauth-link") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = UpstreamOAuthLink::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Upstream OAuth 2.0 link was found") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Upstream OAuth 2.0 link was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_links.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let link = repo + .upstream_oauth_link() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical( + UpstreamOAuthLink::from(link), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + use ulid::Ulid; + + use super::super::test_utils; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("provider1"), + ) + .await + .unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + "subject1".to_owned(), + None, + ) + .await + .unwrap(); + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await + .unwrap(); + repo.save().await.unwrap(); + + let link_id = link.id; + let request = Request::get(format!("/api/admin/v1/upstream-oauth-links/{link_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "upstream-oauth-link", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "subject": "subject1", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "human_account_name": null + }, + 
"links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG09NMZYX8MFYH578R9" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG09NMZYX8MFYH578R9" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let link_id = Ulid::nil(); + let request = Request::get(format!("/api/admin/v1/upstream-oauth-links/{link_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/list.rs new file mode 100644 index 00000000..c233a997 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/list.rs @@ -0,0 +1,739 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, upstream_oauth2::UpstreamOAuthLinkFilter}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UpstreamOAuthLink}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "UpstreamOAuthLinkFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve the items for the given user + #[serde(rename = "filter[user]")] + #[schemars(with = "Option")] + user: Option, + + /// Retrieve the items for the given provider + #[serde(rename = "filter[provider]")] + #[schemars(with = "Option")] + provider: Option, + + /// Retrieve the items with the given subject + #[serde(rename = "filter[subject]")] + subject: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(user) = self.user { + write!(f, "{sep}filter[user]={user}")?; + sep = '&'; + } + + if let Some(provider) = self.provider { + write!(f, "{sep}filter[provider]={provider}")?; + sep = '&'; + } + + if let Some(subject) = &self.subject { + write!(f, "{sep}filter[subject]={subject}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Provider ID {0} not found")] + ProviderNotFound(Ulid), + + #[error("Invalid filter 
parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) | Self::ProviderNotFound(_) => StatusCode::NOT_FOUND, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listUpstreamOAuthLinks") + .summary("List upstream OAuth 2.0 links") + .description("Retrieve a list of upstream OAuth 2.0 links.") + .tag("upstream-oauth-link") + .response_with::<200, Json>, _>(|t| { + let links = UpstreamOAuthLink::samples(); + let pagination = mas_storage::Pagination::first(links.len()); + let page = Page { + edges: links + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of upstream OAuth 2.0 links") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + UpstreamOAuthLink::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User or provider was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_links.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = UpstreamOAuthLink::PATH); + let base = include_count.add_to_base(&base); + let filter = UpstreamOAuthLinkFilter::default(); + + // Load the user from the filter + let maybe_user = if let Some(user_id) = params.user { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UserNotFound(user_id))?; + Some(user) + } else { + None + }; + + let filter = if let Some(user) = &maybe_user { + filter.for_user(user) + } else { + filter + }; + + // Load the provider from the filter + let maybe_provider = if let Some(provider_id) = params.provider { + let provider = repo + .upstream_oauth_provider() + .lookup(provider_id) + .await? + .ok_or(RouteError::ProviderNotFound(provider_id))?; + Some(provider) + } else { + None + }; + + let filter = if let Some(provider) = &maybe_provider { + filter.for_provider(provider) + } else { + filter + }; + + let filter = if let Some(subject) = ¶ms.subject { + filter.for_subject(subject) + } else { + filter + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .upstream_oauth_link() + .list(filter, pagination) + .await? + .map(UpstreamOAuthLink::from); + let count = repo.upstream_oauth_link().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .upstream_oauth_link() + .list(filter, pagination) + .await? 
+ .map(UpstreamOAuthLink::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.upstream_oauth_link().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use super::super::test_utils; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision users and providers + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let bob = repo + .user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + let provider1 = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("acme"), + ) + .await + .unwrap(); + let provider2 = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + test_utils::oidc_provider_params("example"), + ) + .await + .unwrap(); + + // Create some links + let link1 = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider1, + "subject1".to_owned(), + Some("alice@acme".to_owned()), + ) + .await + .unwrap(); + repo.upstream_oauth_link() + .associate_to_user(&link1, &alice) + .await + .unwrap(); + let link2 = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider2, + "subject2".to_owned(), + Some("alice@example".to_owned()), + ) + .await + .unwrap(); + repo.upstream_oauth_link() + .associate_to_user(&link2, &alice) + .await + .unwrap(); + let link3 = repo + .upstream_oauth_link() + .add( + &mut rng, + 
&state.clock, + &provider1, + "subject3".to_owned(), + Some("bob@acme".to_owned()), + ) + .await + .unwrap(); + repo.upstream_oauth_link() + .associate_to_user(&link3, &bob) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get("/api/admin/v1/upstream-oauth-links") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0PJZ6DZNTAA1XKPT4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject3", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "human_account_name": "bob@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0PJZ6DZNTAA1XKPT4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0PJZ6DZNTAA1XKPT4" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0QHEHKX2JNQ2A2D07", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "subject": "subject2", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@example" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0QHEHKX2JNQ2A2D07" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0QHEHKX2JNQ2A2D07" + } + } + } + ], + "links": { + "self": 
"/api/admin/v1/upstream-oauth-links?page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?page[last]=10" + } + } + "#); + + // Filter by user ID + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-links?filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0QHEHKX2JNQ2A2D07", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "subject": "subject2", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@example" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0QHEHKX2JNQ2A2D07" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0QHEHKX2JNQ2A2D07" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[last]=10" + } + } + "#); + + // Filter by provider + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-links?filter[provider]={}", + provider1.id + )) + 
.bearer(&token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0PJZ6DZNTAA1XKPT4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject3", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "human_account_name": "bob@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0PJZ6DZNTAA1XKPT4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0PJZ6DZNTAA1XKPT4" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?filter[provider]=01FSHN9AG09NMZYX8MFYH578R9&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?filter[provider]=01FSHN9AG09NMZYX8MFYH578R9&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?filter[provider]=01FSHN9AG09NMZYX8MFYH578R9&page[last]=10" + } + } + "#); + + // Filter by subject + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-links?filter[subject]={}", + "subject1" + )) + .bearer(&token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + 
"attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?filter[subject]=subject1&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?filter[subject]=subject1&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?filter[subject]=subject1&page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/upstream-oauth-links?count=false") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0PJZ6DZNTAA1XKPT4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject3", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "human_account_name": "bob@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0PJZ6DZNTAA1XKPT4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0PJZ6DZNTAA1XKPT4" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0QHEHKX2JNQ2A2D07", + 
"attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "subject": "subject2", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@example" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0QHEHKX2JNQ2A2D07" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0QHEHKX2JNQ2A2D07" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?count=false&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?count=false&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/upstream-oauth-links?count=only") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "meta": { + "count": 3 + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links?count=only" + } + } + "###); + + // Test count=false with filtering + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-links?count=false&filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0AQZQP8DX40GD59PW", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG09NMZYX8MFYH578R9", + "subject": "subject1", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@acme" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0AQZQP8DX40GD59PW" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AQZQP8DX40GD59PW" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "01FSHN9AG0QHEHKX2JNQ2A2D07", + 
"attributes": { + "created_at": "2022-01-16T14:40:00Z", + "provider_id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "subject": "subject2", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_account_name": "alice@example" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01FSHN9AG0QHEHKX2JNQ2A2D07" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0QHEHKX2JNQ2A2D07" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-links?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-links?count=only&filter[provider]={}", + provider1.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links?filter[provider]=01FSHN9AG09NMZYX8MFYH578R9&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs new file mode 100644 index 00000000..3433aa3c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs @@ -0,0 +1,56 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod add; +mod delete; +mod get; +mod list; + +pub use self::{ + add::{doc as add_doc, handler as add}, + delete::{doc as delete_doc, handler as delete}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; + +#[cfg(test)] +mod test_utils { + use mas_data_model::{ + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderTokenAuthMethod, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_storage::upstream_oauth2::UpstreamOAuthProviderParams; + use oauth2_types::scope::{OPENID, Scope}; + + pub(crate) fn oidc_provider_params(name: &str) -> UpstreamOAuthProviderParams { + UpstreamOAuthProviderParams { + issuer: Some(format!("https://{name}.example.com")), + human_name: Some(name.to_owned()), + brand_name: Some(name.to_owned()), + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretBasic, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: format!("client_{name}"), + encrypted_client_secret: Some("secret".to_owned()), + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::default(), + pkce_mode: UpstreamOAuthProviderPkceMode::default(), + response_mode: None, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/get.rs new 
file mode 100644 index 00000000..3700e1a6 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/get.rs @@ -0,0 +1,196 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{RepositoryAccess, upstream_oauth2::UpstreamOAuthProviderRepository}; + +use crate::{ + admin::{ + call_context::CallContext, + model::UpstreamOAuthProvider, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Provider not found")] + NotFound, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound => StatusCode::NOT_FOUND, + }; + + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUpstreamOAuthProvider") + .summary("Get upstream OAuth provider") + .tag("upstream-oauth-provider") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= UpstreamOAuthProvider::samples(); + t.description("The upstream OAuth provider") + .example(SingleResponse::new_canonical(sample)) + }) + .response_with::<404, Json, _>(|t| t.description("Provider not found")) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_providers.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let provider = repo + .upstream_oauth_provider() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound)?; + + Ok(Json(SingleResponse::new_canonical( + UpstreamOAuthProvider::from(provider), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::{ + UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, + UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderTokenAuthMethod, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_storage::{ + RepositoryAccess, + upstream_oauth2::{UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository}, + }; + use oauth2_types::scope::{OPENID, Scope}; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + async fn create_test_provider(state: &mut TestState) -> UpstreamOAuthProvider { + let mut repo = state.repository().await.unwrap(); + + let params = UpstreamOAuthProviderParams { + issuer: Some("https://accounts.google.com".to_owned()), + human_name: Some("Google".to_owned()), + brand_name: Some("google".to_owned()), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + jwks_uri_override: None, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: true, + userinfo_signed_response_alg: None, + client_id: "google-client-id".to_owned(), + encrypted_client_secret: 
Some("encrypted-secret".to_owned()), + token_endpoint_signing_alg: None, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + response_mode: None, + scope: Scope::from_iter([OPENID]), + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + additional_authorization_parameters: vec![], + forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }; + + let provider = repo + .upstream_oauth_provider() + .add(&mut state.rng(), &state.clock, params) + .await + .unwrap(); + + Box::new(repo).save().await.unwrap(); + + provider + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_provider(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + let provider = create_test_provider(&mut state).await; + + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-providers/{}", + provider.id + )) + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + assert_eq!(body["data"]["type"], "upstream-oauth-provider"); + assert_eq!(body["data"]["id"], provider.id.to_string()); + assert_eq!(body["data"]["attributes"]["human_name"], "Google"); + + insta::assert_json_snapshot!(body, @r###" + { + "data": { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "###); + } 
+ + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + + let provider_id = Ulid::nil(); + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-providers/{provider_id}" + )) + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/list.rs new file mode 100644 index 00000000..d70bbd29 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/list.rs @@ -0,0 +1,799 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, upstream_oauth2::UpstreamOAuthProviderFilter}; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UpstreamOAuthProvider}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "UpstreamOAuthProviderFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve providers that are (or are not) enabled + #[serde(rename = "filter[enabled]")] + enabled: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(enabled) = self.enabled { + write!(f, "{sep}filter[enabled]={enabled}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + 
operation + .id("listUpstreamOAuthProviders") + .summary("List upstream OAuth 2.0 providers") + .tag("upstream-oauth-provider") + .response_with::<200, Json>, _>(|t| { + let providers = UpstreamOAuthProvider::samples(); + let pagination = mas_storage::Pagination::first(providers.len()); + let page = Page { + edges: providers + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of upstream OAuth 2.0 providers") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + UpstreamOAuthProvider::PATH, + )) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.upstream_oauth_providers.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = UpstreamOAuthProvider::PATH); + let base = include_count.add_to_base(&base); + let filter = UpstreamOAuthProviderFilter::new(); + + let filter = match params.enabled { + Some(true) => filter.enabled_only(), + Some(false) => filter.disabled_only(), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .upstream_oauth_provider() + .list(filter, pagination) + .await? + .map(UpstreamOAuthProvider::from); + let count = repo.upstream_oauth_provider().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .upstream_oauth_provider() + .list(filter, pagination) + .await? 
+ .map(UpstreamOAuthProvider::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.upstream_oauth_provider().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::{ + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderTokenAuthMethod, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_storage::{ + RepositoryAccess, + upstream_oauth2::{UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository}, + }; + use oauth2_types::scope::{OPENID, Scope}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + async fn create_test_providers(state: &mut TestState) { + let mut repo = state.repository().await.unwrap(); + + // Create an enabled provider + let enabled_params = UpstreamOAuthProviderParams { + issuer: Some("https://accounts.google.com".to_owned()), + human_name: Some("Google".to_owned()), + brand_name: Some("google".to_owned()), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + jwks_uri_override: None, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: true, + userinfo_signed_response_alg: None, + client_id: "google-client-id".to_owned(), + encrypted_client_secret: Some("encrypted-secret".to_owned()), + token_endpoint_signing_alg: None, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + response_mode: None, + scope: Scope::from_iter([OPENID]), + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + additional_authorization_parameters: vec![], + 
forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }; + + repo.upstream_oauth_provider() + .add(&mut state.rng(), &state.clock, enabled_params) + .await + .unwrap(); + + // Create a disabled provider + let disabled_params = UpstreamOAuthProviderParams { + issuer: Some("https://appleid.apple.com".to_owned()), + human_name: Some("Apple ID".to_owned()), + brand_name: Some("apple".to_owned()), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: UpstreamOAuthProviderPkceMode::S256, + jwks_uri_override: None, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: true, + userinfo_signed_response_alg: None, + client_id: "apple-client-id".to_owned(), + encrypted_client_secret: Some("encrypted-secret".to_owned()), + token_endpoint_signing_alg: None, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + response_mode: None, + scope: Scope::from_iter([OPENID]), + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + additional_authorization_parameters: vec![], + forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 1, + }; + + let disabled_provider = repo + .upstream_oauth_provider() + .add(&mut state.rng(), &state.clock, disabled_params) + .await + .unwrap(); + + // Disable the provider + repo.upstream_oauth_provider() + .disable(&state.clock, disabled_provider) + .await + .unwrap(); + + // Create another enabled provider + let another_enabled_params = UpstreamOAuthProviderParams { + issuer: Some("https://login.microsoftonline.com/common/v2.0".to_owned()), + human_name: Some("Microsoft".to_owned()), + brand_name: Some("microsoft".to_owned()), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: 
UpstreamOAuthProviderPkceMode::Auto, + jwks_uri_override: None, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: true, + userinfo_signed_response_alg: None, + client_id: "microsoft-client-id".to_owned(), + encrypted_client_secret: Some("encrypted-secret".to_owned()), + token_endpoint_signing_alg: None, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + response_mode: None, + scope: Scope::from_iter([OPENID]), + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + additional_authorization_parameters: vec![], + forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 2, + }; + + repo.upstream_oauth_provider() + .add(&mut state.rng(), &state.clock, another_enabled_params) + .await + .unwrap(); + + Box::new(repo).save().await.unwrap(); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list_all_providers(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_providers(&mut state).await; + + let request = Request::get("/api/admin/v1/upstream-oauth-providers") + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + // Should return all providers + assert_eq!(body["data"].as_array().unwrap().len(), 3); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "issuer": "https://appleid.apple.com", + "human_name": "Apple ID", + "brand_name": "apple", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": "2022-01-16T14:40:00Z" + }, + 
"links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "issuer": "https://login.microsoftonline.com/common/v2.0", + "human_name": "Microsoft", + "brand_name": "microsoft", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?page[first]=10", + "first": "/api/admin/v1/upstream-oauth-providers?page[first]=10", + "last": "/api/admin/v1/upstream-oauth-providers?page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_enabled_true(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_providers(&mut state).await; + + let request = Request::get("/api/admin/v1/upstream-oauth-providers?filter[enabled]=true") + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "upstream-oauth-provider", + 
"id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "issuer": "https://login.microsoftonline.com/common/v2.0", + "human_name": "Microsoft", + "brand_name": "microsoft", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_enabled_false(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_providers(&mut state).await; + + let request = Request::get("/api/admin/v1/upstream-oauth-providers?filter[enabled]=false") + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "issuer": "https://appleid.apple.com", + "human_name": "Apple ID", + "brand_name": "apple", + 
"created_at": "2022-01-16T14:40:00Z", + "disabled_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=false&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=false&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=false&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_pagination(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_providers(&mut state).await; + + // Test first page with limit of 2 + let request = Request::get("/api/admin/v1/upstream-oauth-providers?page[first]=2") + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "issuer": "https://appleid.apple.com", + "human_name": "Apple ID", + "brand_name": "apple", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "issuer": "https://login.microsoftonline.com/common/v2.0", + "human_name": "Microsoft", + "brand_name": "microsoft", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": 
"/api/admin/v1/upstream-oauth-providers/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?page[first]=2", + "first": "/api/admin/v1/upstream-oauth-providers?page[first]=2", + "last": "/api/admin/v1/upstream-oauth-providers?page[last]=2", + "next": "/api/admin/v1/upstream-oauth-providers?page[after]=01FSHN9AG09AVTNSQFMSR34AJC&page[first]=2" + } + } + "#); + + // Extract the ID of the last item for pagination + let last_item_id = body["data"][1]["id"].as_str().unwrap(); + let request = Request::get(format!( + "/api/admin/v1/upstream-oauth-providers?page[first]=2&page[after]={last_item_id}", + )) + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?page[after]=01FSHN9AG09AVTNSQFMSR34AJC&page[first]=2", + "first": "/api/admin/v1/upstream-oauth-providers?page[first]=2", + "last": "/api/admin/v1/upstream-oauth-providers?page[last]=2" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_invalid_filter(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + + let request = + 
Request::get("/api/admin/v1/upstream-oauth-providers?filter[enabled]=invalid") + .bearer(&admin_token) + .empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_count_parameter(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_providers(&mut state).await; + + // Test count=false + let request = Request::get("/api/admin/v1/upstream-oauth-providers?count=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "issuer": "https://appleid.apple.com", + "human_name": "Apple ID", + "brand_name": "apple", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "issuer": "https://login.microsoftonline.com/common/v2.0", + "human_name": "Microsoft", + "brand_name": "microsoft", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + 
"disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?count=false&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-providers?count=false&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-providers?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/upstream-oauth-providers?count=only") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?count=only" + } + } + "#); + + // Test count=false with filtering + let request = + Request::get("/api/admin/v1/upstream-oauth-providers?count=false&filter[enabled]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "issuer": "https://login.microsoftonline.com/common/v2.0", + "human_name": "Microsoft", + "brand_name": "microsoft", + "created_at": "2022-01-16T14:40:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "2022-01-16T14:40:00Z", + 
"disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&count=false&page[first]=10", + "first": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&count=false&page[first]=10", + "last": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=true&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = + Request::get("/api/admin/v1/upstream-oauth-providers?count=only&filter[enabled]=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json::(); + + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?filter[enabled]=false&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/mod.rs new file mode 100644 index 00000000..18ffe5af --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/upstream_oauth_providers/mod.rs @@ -0,0 +1,12 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod get; +mod list; + +pub use self::{ + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/add.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/add.rs new file mode 100644 index 00000000..10622b61 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/add.rs @@ -0,0 +1,324 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::str::FromStr as _; + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::{ + queue::{ProvisionUserJob, QueueJobRepositoryExt as _}, + user::UserEmailFilter, +}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::UserEmail, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User email {0:?} already in use")] + EmailAlreadyInUse(String), + + #[error("Email {email:?} is not valid")] + EmailNotValid { + email: String, + + #[source] + source: lettre::address::AddressError, + }, + + #[error("User ID {0} not found")] + UserNotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + 
Self::EmailAlreadyInUse(_) => StatusCode::CONFLICT, + Self::EmailNotValid { .. } => StatusCode::BAD_REQUEST, + Self::UserNotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/user-emails` +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "AddUserEmailRequest")] +pub struct Request { + /// The ID of the user to which the email should be added. + #[schemars(with = "crate::admin::schema::Ulid")] + user_id: Ulid, + + /// The email address of the user to add. + #[schemars(email)] + email: String, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("addUserEmail") + .summary("Add a user email") + .description(r"Add an email address to a user. +Note that this endpoint ignores any policy which would normally prevent the email from being added.") + .tag("user-email") + .response_with::<201, Json>, _>(|t| { + let [sample, ..] = UserEmail::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("User email was created").example(response) + }) + .response_with::<409, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::EmailAlreadyInUse( + "alice@example.com".to_owned(), + )); + t.description("Email already in use").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::EmailNotValid { + email: "not a valid email".to_owned(), + source: lettre::address::AddressError::MissingParts, + }); + t.description("Email is not valid").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_emails.add", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. 
+ }: CallContext, + NoApi(mut rng): NoApi, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + // Find the user + let user = repo + .user() + .lookup(params.user_id) + .await? + .ok_or(RouteError::UserNotFound(params.user_id))?; + + // Validate the email + if let Err(source) = lettre::Address::from_str(¶ms.email) { + return Err(RouteError::EmailNotValid { + email: params.email, + source, + }); + } + + // Check if the email already exists + let count = repo + .user_email() + .count(UserEmailFilter::new().for_email(¶ms.email)) + .await?; + + if count > 0 { + return Err(RouteError::EmailAlreadyInUse(params.email)); + } + + // Add the email to the user + let user_email = repo + .user_email() + .add(&mut rng, &clock, &user, params.email) + .await?; + + // Schedule a job to update the user + repo.queue_job() + .schedule_job(&mut rng, &clock, ProvisionUserJob::new_for_id(user.id)) + .await?; + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical(user_email.into())), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/user-emails") + .bearer(&token) + .json(serde_json::json!({ + "email": "alice@example.com", + "user_id": alice.id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let body: 
serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "user-email", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG07HNEZXNQM2KNBNF6" + } + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_user_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/user-emails") + .bearer(&token) + .json(serde_json::json!({ + "email": "alice@example.com", + "user_id": Ulid::nil(), + })); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_email_already_exists(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.user_email() + .add( + &mut rng, + &state.clock, + &alice, + "alice@example.com".to_owned(), + ) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/user-emails") + .bearer(&token) + .json(serde_json::json!({ + "email": "alice@example.com", + "user_id": alice.id, + })); + let response = state.request(request).await; + 
response.assert_status(StatusCode::CONFLICT); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "User email \"alice@example.com\" already in use" + } + ] + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_invalid_email(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post("/api/admin/v1/user-emails") + .bearer(&token) + .json(serde_json::json!({ + "email": "invalid-email", + "user_id": alice.id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "Email \"invalid-email\" is not valid" + }, + { + "title": "Missing domain or user" + } + ] + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/delete.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/delete.rs new file mode 100644 index 00000000..133e8599 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/delete.rs @@ -0,0 +1,141 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::queue::{ProvisionUserJob, QueueJobRepositoryExt as _}; +use ulid::Ulid; + +use crate::{ + admin::{call_context::CallContext, params::UlidPathParam, response::ErrorResponse}, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User email ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("deleteUserEmail") + .summary("Delete a user email") + .tag("user-email") + .response_with::<204, (), _>(|t| t.description("User email was found")) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User email was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_emails.delete", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + id: UlidPathParam, +) -> Result { + let email = repo + .user_email() + .lookup(*id) + .await? 
+ .ok_or(RouteError::NotFound(*id))?; + + let job = ProvisionUserJob::new_for_id(email.user_id); + repo.user_email().remove(email).await?; + + // Schedule a job to update the user + repo.queue_job().schedule_job(&mut rng, &clock, job).await?; + + repo.save().await?; + + Ok(StatusCode::NO_CONTENT) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_delete(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and an email + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let mas_data_model::UserEmail { id, .. } = repo + .user_email() + .add( + &mut rng, + &state.clock, + &alice, + "alice@example.com".to_owned(), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::delete(format!("/api/admin/v1/user-emails/{id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NO_CONTENT); + + // Verify that the email was deleted + let request = Request::get(format!("/api/admin/v1/user-emails/{id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let email_id = Ulid::nil(); + let request = Request::delete(format!("/api/admin/v1/user-emails/{email_id}")) + .bearer(&token) + .empty(); + let response = 
state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/get.rs new file mode 100644 index 00000000..826cb8c2 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/get.rs @@ -0,0 +1,152 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::UserEmail, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User email ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUserEmail") + .summary("Get a user email") + .tag("user-email") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= UserEmail::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("User email was found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User email was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_emails.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let email = repo + .user_email() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical(UserEmail::from(email)))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and an email + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let mas_data_model::UserEmail { id, .. 
} = repo + .user_email() + .add( + &mut rng, + &state.clock, + &alice, + "alice@example.com".to_owned(), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get(format!("/api/admin/v1/user-emails/{id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_eq!(body["data"]["type"], "user-email"); + insta::assert_json_snapshot!(body, @r###" + { + "data": { + "type": "user-email", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + "###); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_not_found(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let email_id = Ulid::nil(); + let request = Request::get(format!("/api/admin/v1/user-emails/{email_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/list.rs new file mode 100644 index 00000000..453ef0e8 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/list.rs @@ -0,0 +1,493 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, user::UserEmailFilter}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserEmail}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "UserEmailFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve the items for the given user + #[serde(rename = "filter[user]")] + #[schemars(with = "Option")] + user: Option, + + /// Retrieve the user email with the given email address + #[serde(rename = "filter[email]")] + email: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(user) = self.user { + write!(f, "{sep}filter[user]={user}")?; + sep = '&'; + } + + if let Some(email) = &self.email { + write!(f, "{sep}filter[email]={email}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + 
Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) => StatusCode::NOT_FOUND, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listUserEmails") + .summary("List user emails") + .description("Retrieve a list of user emails.") + .tag("user-email") + .response_with::<200, Json>, _>(|t| { + let emails = UserEmail::samples(); + let pagination = mas_storage::Pagination::first(emails.len()); + let page = Page { + edges: emails + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of user emails") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + UserEmail::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_emails.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = UserEmail::PATH); + let base = include_count.add_to_base(&base); + let filter = UserEmailFilter::default(); + + // Load the user from the filter + let user = if let Some(user_id) = params.user { + let user = repo + .user() + .lookup(user_id) + .await? 
+ .ok_or(RouteError::UserNotFound(user_id))?; + + Some(user) + } else { + None + }; + + let filter = match &user { + Some(user) => filter.for_user(user), + None => filter, + }; + + let filter = match ¶ms.email { + Some(email) => filter.for_email(email), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .user_email() + .list(filter, pagination) + .await? + .map(UserEmail::from); + let count = repo.user_email().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .user_email() + .list(filter, pagination) + .await? + .map(UserEmail::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.user_email().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision two users, two emails + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let bob = repo + .user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + + repo.user_email() + .add( + &mut rng, + &state.clock, + &alice, + "alice@example.com".to_owned(), + ) + .await + .unwrap(); + repo.user_email() + .add(&mut rng, &state.clock, &bob, "bob@example.com".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::get("/api/admin/v1/user-emails") + .bearer(&token) + .empty(); + let 
response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user-email", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG09NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09NMZYX8MFYH578R9" + } + } + }, + { + "type": "user-email", + "id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "email": "bob@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG0KEPHYQQXW9XPTX6Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0KEPHYQQXW9XPTX6Z" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?page[first]=10", + "first": "/api/admin/v1/user-emails?page[first]=10", + "last": "/api/admin/v1/user-emails?page[last]=10" + } + } + "#); + + // Filter by user + let request = Request::get(format!( + "/api/admin/v1/user-emails?filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-email", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG09NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09NMZYX8MFYH578R9" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + 
"first": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "last": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[last]=10" + } + } + "#); + + // Filter by email + let request = Request::get("/api/admin/v1/user-emails?filter[email]=alice@example.com") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-email", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG09NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09NMZYX8MFYH578R9" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?filter[email]=alice@example.com&page[first]=10", + "first": "/api/admin/v1/user-emails?filter[email]=alice@example.com&page[first]=10", + "last": "/api/admin/v1/user-emails?filter[email]=alice@example.com&page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/user-emails?count=false") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-email", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG09NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09NMZYX8MFYH578R9" + } + } + }, + { + "type": "user-email", + "id": "01FSHN9AG0KEPHYQQXW9XPTX6Z", + "attributes": { + 
"created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "email": "bob@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG0KEPHYQQXW9XPTX6Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0KEPHYQQXW9XPTX6Z" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?count=false&page[first]=10", + "first": "/api/admin/v1/user-emails?count=false&page[first]=10", + "last": "/api/admin/v1/user-emails?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/user-emails?count=only") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/user-emails?count=only" + } + } + "###); + + // Test count=false with filtering + let request = Request::get(format!( + "/api/admin/v1/user-emails?count=false&filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-email", + "id": "01FSHN9AG09NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01FSHN9AG09NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09NMZYX8MFYH578R9" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "first": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "last": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[last]=10" + 
} + } + "#); + + // Test count=only with filtering + let request = Request::get(format!( + "/api/admin/v1/user-emails?count=only&filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/user-emails?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/mod.rs new file mode 100644 index 00000000..38f3ec98 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_emails/mod.rs @@ -0,0 +1,16 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod add; +mod delete; +mod get; +mod list; + +pub use self::{ + add::{doc as add_doc, handler as add}, + delete::{doc as delete_doc, handler as delete}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/add.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/add.rs new file mode 100644 index 00000000..3e641e25 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/add.rs @@ -0,0 +1,262 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use chrono::{DateTime, Utc}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use rand::distributions::{Alphanumeric, DistString}; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::UserRegistrationToken, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error("A registration token with the same token already exists")] + Conflict(mas_data_model::UserRegistrationToken), + + #[error(transparent)] + Internal(Box), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Conflict(_) => StatusCode::CONFLICT, + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/user-registration-tokens` +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "AddUserRegistrationTokenRequest")] +pub struct Request { + /// The token string. If not provided, a random token will be generated. + token: Option, + + /// Maximum number of times this token can be used. If not provided, the + /// token can be used an unlimited number of times. + usage_limit: Option, + + /// When the token expires. If not provided, the token never expires. 
+ expires_at: Option>, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("addUserRegistrationToken") + .summary("Create a new user registration token") + .tag("user-registration-token") + .response_with::<201, Json>, _>(|t| { + let [sample, ..] = UserRegistrationToken::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("A new user registration token was created") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_registration_tokens.post", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + // Generate a random token if none was provided + let token = params + .token + .unwrap_or_else(|| Alphanumeric.sample_string(&mut rng, 12)); + + // See if we have an existing token with the same token + let existing_token = repo.user_registration_token().find_by_token(&token).await?; + if let Some(existing_token) = existing_token { + return Err(RouteError::Conflict(existing_token)); + } + + let registration_token = repo + .user_registration_token() + .add( + &mut rng, + &clock, + token, + params.usage_limit, + params.expires_at, + ) + .await?; + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical(UserRegistrationToken::new( + registration_token, + clock.now(), + ))), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/user-registration-tokens") + .bearer(&token) + 
.json(serde_json::json!({ + "token": "test_token_123", + "usage_limit": 5, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_token_123", + "valid": true, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create_auto_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/user-registration-tokens") + .bearer(&token) + .json(serde_json::json!({ + "usage_limit": 1 + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0QMGC989M0XSFVF2X", + "attributes": { + "token": "42oTpLoieH5I", + "valid": true, + "usage_limit": 1, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0QMGC989M0XSFVF2X" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0QMGC989M0XSFVF2X" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_create_conflict(pool: PgPool) { + setup(); + let mut state = 
TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/user-registration-tokens") + .bearer(&token) + .json(serde_json::json!({ + "token": "test_token_123", + "usage_limit": 5 + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_token_123", + "valid": true, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + + let request = Request::post("/api/admin/v1/user-registration-tokens") + .bearer(&token) + .json(serde_json::json!({ + "token": "test_token_123", + "usage_limit": 5 + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CONFLICT); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/get.rs new file mode 100644 index 00000000..187c1903 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/get.rs @@ -0,0 +1,175 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::UserRegistrationToken, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Registration token with ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUserRegistrationToken") + .summary("Get a user registration token") + .tag("user-registration-token") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = UserRegistrationToken::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("Registration token was found") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Registration token was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_registration_tokens.get", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let token = repo + .user_registration_token() + .lookup(*id) + .await? 
+ .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical( + UserRegistrationToken::new(token, clock.now()), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_token_123".to_owned(), + Some(5), + None, + ) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::get(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_token_123", + "valid": true, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_nonexistent_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Use a fixed ID for the test to ensure consistent 
snapshots + let nonexistent_id = Ulid::from_string("00000000000000000000000000").unwrap(); + let request = Request::get(format!( + "/api/admin/v1/user-registration-tokens/{nonexistent_id}" + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r###" + { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 not found" + } + ] + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/list.rs new file mode 100644 index 00000000..26e92540 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/list.rs @@ -0,0 +1,1585 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, user::UserRegistrationTokenFilter}; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserRegistrationToken}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "RegistrationTokenFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve tokens that have (or have not) been used at least once + #[serde(rename = "filter[used]")] + used: Option, + + /// Retrieve tokens that are (or are not) revoked + #[serde(rename = "filter[revoked]")] + revoked: Option, + + /// Retrieve tokens that are (or are not) expired + #[serde(rename = "filter[expired]")] + expired: Option, + + /// Retrieve tokens that are (or are not) valid + /// + /// Valid means that the token has not expired, is not revoked, and has not + /// reached its usage limit. 
+ #[serde(rename = "filter[valid]")] + valid: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(used) = self.used { + write!(f, "{sep}filter[used]={used}")?; + sep = '&'; + } + if let Some(revoked) = self.revoked { + write!(f, "{sep}filter[revoked]={revoked}")?; + sep = '&'; + } + if let Some(expired) = self.expired { + write!(f, "{sep}filter[expired]={expired}")?; + sep = '&'; + } + if let Some(valid) = self.valid { + write!(f, "{sep}filter[valid]={valid}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listUserRegistrationTokens") + .summary("List user registration tokens") + .tag("user-registration-token") + .response_with::<200, Json>, _>(|t| { + let tokens = UserRegistrationToken::samples(); + let pagination = mas_storage::Pagination::first(tokens.len()); + let page = Page { + edges: tokens + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of registration tokens") + .example(PaginatedResponse::for_page( + page, + 
pagination, + Some(42), + UserRegistrationToken::PATH, + )) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.registration_tokens.list", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = UserRegistrationToken::PATH); + let base = include_count.add_to_base(&base); + let now = clock.now(); + let mut filter = UserRegistrationTokenFilter::new(now); + + if let Some(used) = params.used { + filter = filter.with_been_used(used); + } + + if let Some(revoked) = params.revoked { + filter = filter.with_revoked(revoked); + } + + if let Some(expired) = params.expired { + filter = filter.with_expired(expired); + } + + if let Some(valid) = params.valid { + filter = filter.with_valid(valid); + } + + let response = match include_count { + IncludeCount::True => { + let page = repo + .user_registration_token() + .list(filter, pagination) + .await? + .map(|token| UserRegistrationToken::new(token, now)); + let count = repo.user_registration_token().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .user_registration_token() + .list(filter, pagination) + .await? 
+ .map(|token| UserRegistrationToken::new(token, now)); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.user_registration_token().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::Clock as _; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + async fn create_test_tokens(state: &mut TestState) { + let mut repo = state.repository().await.unwrap(); + + // Token 1: Never used, not revoked + repo.user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "token_unused".to_owned(), + Some(10), + None, + ) + .await + .unwrap(); + + // Token 2: Used, not revoked + let token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "token_used".to_owned(), + Some(10), + None, + ) + .await + .unwrap(); + repo.user_registration_token() + .use_token(&state.clock, token) + .await + .unwrap(); + + // Token 3: Never used, revoked + let token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "token_revoked".to_owned(), + Some(10), + None, + ) + .await + .unwrap(); + repo.user_registration_token() + .revoke(&state.clock, token) + .await + .unwrap(); + + // Token 4: Used, revoked + let token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "token_used_revoked".to_owned(), + Some(10), + None, + ) + .await + .unwrap(); + let token = repo + .user_registration_token() + .use_token(&state.clock, token) + .await + .unwrap(); + repo.user_registration_token() + .revoke(&state.clock, token) + .await + .unwrap(); + + // Token 5: Expired token + let expires_at = state.clock.now() - Duration::try_days(1).unwrap(); + repo.user_registration_token() + .add( + &mut state.rng(), + &state.clock, + 
"token_expired".to_owned(), + Some(5), + Some(expires_at), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list_all_tokens(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + let request = Request::get("/api/admin/v1/user-registration-tokens") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 5 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": 
"2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_used(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Filter for used tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[used]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + 
insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[used]=true&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[used]=true&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[used]=true&page[last]=10" + } + } + "#); + + // Filter for unused tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[used]=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + 
"expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[used]=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[used]=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[used]=false&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_revoked(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Filter for revoked tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[revoked]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + 
response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[revoked]=true&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[revoked]=true&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[revoked]=true&page[last]=10" + } + } + "#); + + // Filter for non-revoked tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[revoked]=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + 
"valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[revoked]=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[revoked]=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[revoked]=false&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_expired(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Filter for expired tokens + let request = 
Request::get("/api/admin/v1/user-registration-tokens?filter[expired]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[expired]=true&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[expired]=true&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[expired]=true&page[last]=10" + } + } + "#); + + // Filter for non-expired tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[expired]=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 4 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + 
{ + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[expired]=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[expired]=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[expired]=false&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_filter_by_valid(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = 
state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Filter for valid tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[valid]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[valid]=true&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[valid]=true&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[valid]=true&page[last]=10" + } + } + "#); + + // Filter for invalid tokens + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[valid]=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = 
response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[valid]=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[valid]=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[valid]=false&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_combined_filters(pool: PgPool) { + 
setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Filter for used AND revoked tokens + let request = Request::get( + "/api/admin/v1/user-registration-tokens?filter[used]=true&filter[revoked]=true", + ) + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[used]=true&filter[revoked]=true&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[used]=true&filter[revoked]=true&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[used]=true&filter[revoked]=true&page[last]=10" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_pagination(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Request with pagination (2 per page) + let request = Request::get("/api/admin/v1/user-registration-tokens?page[first]=2") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + 
let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 5 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?page[first]=2", + "first": "/api/admin/v1/user-registration-tokens?page[first]=2", + "last": "/api/admin/v1/user-registration-tokens?page[last]=2", + "next": "/api/admin/v1/user-registration-tokens?page[after]=01FSHN9AG07HNEZXNQM2KNBNF6&page[first]=2" + } + } + "#); + + // Request second page + let request = Request::get("/api/admin/v1/user-registration-tokens?page[after]=01FSHN9AG07HNEZXNQM2KNBNF6&page[first]=2") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 5 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + 
"usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?page[after]=01FSHN9AG07HNEZXNQM2KNBNF6&page[first]=2", + "first": "/api/admin/v1/user-registration-tokens?page[first]=2", + "last": "/api/admin/v1/user-registration-tokens?page[last]=2", + "next": "/api/admin/v1/user-registration-tokens?page[after]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=2" + } + } + "#); + + // Request last item + let request = Request::get("/api/admin/v1/user-registration-tokens?page[last]=1") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 5 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { 
+ "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?page[last]=1", + "first": "/api/admin/v1/user-registration-tokens?page[first]=1", + "last": "/api/admin/v1/user-registration-tokens?page[last]=1", + "prev": "/api/admin/v1/user-registration-tokens?page[before]=01FSHN9AG0S3ZJD8CXQ7F11KXN&page[last]=1" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_invalid_filter(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + + // Try with invalid filter value + let request = Request::get("/api/admin/v1/user-registration-tokens?filter[used]=invalid") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + + let body: serde_json::Value = response.json(); + assert!( + body["errors"][0]["title"] + .as_str() + .unwrap() + .contains("Invalid filter parameters") + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_count_parameter(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let admin_token = state.token_with_scope("urn:mas:admin").await; + create_test_tokens(&mut state).await; + + // Test count=false + let request = Request::get("/api/admin/v1/user-registration-tokens?count=false") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG064K8BYZXSY5G511Z", + "attributes": { + "token": "token_expired", + "valid": false, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-01-15T14:40:00Z", + "revoked_at": 
null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG064K8BYZXSY5G511Z" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG064K8BYZXSY5G511Z" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG09AVTNSQFMSR34AJC", + "attributes": { + "token": "token_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG09AVTNSQFMSR34AJC" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG09AVTNSQFMSR34AJC" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0S3ZJD8CXQ7F11KXN", + "attributes": { + "token": "token_used_revoked", + "valid": false, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": "2022-01-16T14:40:00Z" + }, + "links": { + "self": 
"/api/admin/v1/user-registration-tokens/01FSHN9AG0S3ZJD8CXQ7F11KXN" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0S3ZJD8CXQ7F11KXN" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?count=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?count=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/user-registration-tokens?count=only") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 5 + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens?count=only" + } + } + "#); + + // Test count=false with filtering + let request = + Request::get("/api/admin/v1/user-registration-tokens?count=false&filter[valid]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-registration_token", + "id": "01FSHN9AG07HNEZXNQM2KNBNF6", + "attributes": { + "token": "token_used", + "valid": true, + "usage_limit": 10, + "times_used": 1, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": "2022-01-16T14:40:00Z", + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG07HNEZXNQM2KNBNF6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG07HNEZXNQM2KNBNF6" + } + } + }, + { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "token_unused", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + 
"revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[valid]=true&count=false&page[first]=10", + "first": "/api/admin/v1/user-registration-tokens?filter[valid]=true&count=false&page[first]=10", + "last": "/api/admin/v1/user-registration-tokens?filter[valid]=true&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = + Request::get("/api/admin/v1/user-registration-tokens?count=only&filter[revoked]=true") + .bearer(&admin_token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens?filter[revoked]=true&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/mod.rs new file mode 100644 index 00000000..42d16af7 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/mod.rs @@ -0,0 +1,21 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod add; +mod get; +mod list; +mod revoke; +mod unrevoke; +mod update; + +pub use self::{ + add::{doc as add_doc, handler as add}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, + revoke::{doc as revoke_doc, handler as revoke}, + unrevoke::{doc as unrevoke_doc, handler as unrevoke}, + update::{doc as update_doc, handler as update}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/revoke.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/revoke.rs new file mode 100644 index 00000000..b2632d4b --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/revoke.rs @@ -0,0 +1,218 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserRegistrationToken}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Registration token with ID {0} not found")] + NotFound(Ulid), + + #[error("Registration token with ID {0} is already revoked")] + AlreadyRevoked(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + 
Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::AlreadyRevoked(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("revokeUserRegistrationToken") + .summary("Revoke a user registration token") + .description("Calling this endpoint will revoke the user registration token, preventing it from being used for new registrations.") + .tag("user-registration-token") + .response_with::<200, Json>, _>(|t| { + // Get the revoked token sample + let [_, revoked_token] = UserRegistrationToken::samples(); + let id = revoked_token.id(); + let response = SingleResponse::new(revoked_token, format!("/api/admin/v1/user-registration-tokens/{id}/revoke")); + t.description("Registration token was revoked").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::AlreadyRevoked(Ulid::nil())); + t.description("Token is already revoked").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Registration token was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_registration_tokens.revoke", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let token = repo + .user_registration_token() + .lookup(id) + .await? 
+ .ok_or(RouteError::NotFound(id))?; + + // Check if the token is already revoked + if token.revoked_at.is_some() { + return Err(RouteError::AlreadyRevoked(id)); + } + + // Revoke the token + let token = repo.user_registration_token().revoke(&clock, token).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + UserRegistrationToken::new(token, clock.now()), + format!("/api/admin/v1/user-registration-tokens/{id}/revoke"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::Clock as _; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_token_456".to_owned(), + Some(5), + None, + ) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!( + "/api/admin/v1/user-registration-tokens/{}/revoke", + registration_token.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The revoked_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["revoked_at"], + serde_json::json!(state.clock.now()) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_already_revoked_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let registration_token = repo + 
.user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_token_789".to_owned(), + None, + None, + ) + .await + .unwrap(); + + // Revoke the token first + let registration_token = repo + .user_registration_token() + .revoke(&state.clock, registration_token) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Move the clock forward + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!( + "/api/admin/v1/user-registration-tokens/{}/revoke", + registration_token.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + format!( + "Registration token with ID {} is already revoked", + registration_token.id + ) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_unknown_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post( + "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081/revoke", + ) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "Registration token with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/unrevoke.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/unrevoke.rs new file mode 100644 index 00000000..212b7cdf --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/unrevoke.rs @@ -0,0 +1,238 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserRegistrationToken}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Registration token with ID {0} not found")] + NotFound(Ulid), + + #[error("Registration token with ID {0} is not revoked")] + NotRevoked(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::NotRevoked(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("unrevokeUserRegistrationToken") + .summary("Unrevoke a user registration token") + .description("Calling this endpoint will unrevoke a previously revoked user registration token, allowing it to be used for registrations again (subject to its usage limits and expiration).") + .tag("user-registration-token") + .response_with::<200, Json>, _>(|t| { + // Get the valid token sample + let [valid_token, _] = UserRegistrationToken::samples(); + let id = valid_token.id(); + let response = SingleResponse::new(valid_token, 
format!("/api/admin/v1/user-registration-tokens/{id}/unrevoke")); + t.description("Registration token was unrevoked").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotRevoked(Ulid::nil())); + t.description("Token is not revoked").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Registration token was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_registration_tokens.unrevoke", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let token = repo + .user_registration_token() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + // Check if the token is not revoked + if token.revoked_at.is_none() { + return Err(RouteError::NotRevoked(id)); + } + + // Unrevoke the token using the repository method + let token = repo.user_registration_token().unrevoke(token).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + UserRegistrationToken::new(token, clock.now()), + format!("/api/admin/v1/user-registration-tokens/{id}/unrevoke"), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unrevoke_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + + // Create a token + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_token_456".to_owned(), + Some(5), + None, + ) + .await + .unwrap(); + + // Revoke it + let 
registration_token = repo + .user_registration_token() + .revoke(&state.clock, registration_token) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now unrevoke it + let request = Request::post(format!( + "/api/admin/v1/user-registration-tokens/{}/unrevoke", + registration_token.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The revoked_at timestamp should be null + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_token_456", + "valid": true, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E/unrevoke" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unrevoke_not_revoked_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_token_789".to_owned(), + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Try to unrevoke a token that's not revoked + let request = Request::post(format!( + "/api/admin/v1/user-registration-tokens/{}/unrevoke", + registration_token.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + 
format!( + "Registration token with ID {} is not revoked", + registration_token.id + ) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unrevoke_unknown_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post( + "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081/unrevoke", + ) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "Registration token with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/update.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/update.rs new file mode 100644 index 00000000..06ad8283 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_registration_tokens/update.rs @@ -0,0 +1,512 @@ +// Copyright 2025 New Vector Ltd. +// Copyright 2025 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use chrono::{DateTime, Utc}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use schemars::JsonSchema; +use serde::{Deserialize, Deserializer}; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserRegistrationToken}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +// Any value that is present is considered Some value, including null. 
+fn deserialize_some<'de, T, D>(deserializer: D) -> Result, D::Error> +where + T: Deserialize<'de>, + D: Deserializer<'de>, +{ + Deserialize::deserialize(deserializer).map(Some) +} + +/// # JSON payload for the `PUT /api/admin/v1/user-registration-tokens/{id}` endpoint +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "EditUserRegistrationTokenRequest")] +pub struct Request { + /// New expiration date for the token, or null to remove expiration + #[serde( + skip_serializing_if = "Option::is_none", + default, + deserialize_with = "deserialize_some" + )] + #[expect(clippy::option_option)] + expires_at: Option>>, + + /// New usage limit for the token, or null to remove the limit + #[expect(clippy::option_option)] + #[serde( + skip_serializing_if = "Option::is_none", + default, + deserialize_with = "deserialize_some" + )] + usage_limit: Option>, +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Registration token with ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("updateUserRegistrationToken") + .summary("Update a user registration token") + .description("Update properties of a user registration token such as expiration and usage limit. To set a field to null (removing the limit/expiration), include the field with a null value. 
To leave a field unchanged, omit it from the request body.") + .tag("user-registration-token") + .response_with::<200, Json>, _>(|t| { + // Get the valid token sample + let [valid_token, _] = UserRegistrationToken::samples(); + let id = valid_token.id(); + let response = SingleResponse::new(valid_token, format!("/api/admin/v1/user-registration-tokens/{id}")); + t.description("Registration token was updated").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("Registration token was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_registration_tokens.update", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, + Json(request): Json, +) -> Result>, RouteError> { + let id = *id; + + // Get the token + let mut token = repo + .user_registration_token() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + // Update expiration if present in the request + if let Some(expires_at) = request.expires_at { + token = repo + .user_registration_token() + .set_expiry(token, expires_at) + .await?; + } + + // Update usage limit if present in the request + if let Some(usage_limit) = request.usage_limit { + token = repo + .user_registration_token() + .set_usage_limit(token, usage_limit) + .await?; + } + + repo.save().await?; + + Ok(Json(SingleResponse::new( + UserRegistrationToken::new(token, clock.now()), + format!("/api/admin/v1/user-registration-tokens/{id}"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::Clock as _; + use serde_json::json; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_update_expiry(pool: PgPool) { + setup(); + let mut state = 
TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + + // Create a token without expiry + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_update_expiry".to_owned(), + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Update with an expiry date + let future_date = state.clock.now() + Duration::days(30); + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .json(json!({ + "expires_at": future_date + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // Verify expiry was updated + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_update_expiry", + "valid": true, + "usage_limit": null, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-02-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + + // Now remove the expiry + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .json(json!({ + "expires_at": null + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // Verify expiry was removed + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_update_expiry", 
+ "valid": true, + "usage_limit": null, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_update_usage_limit(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + + // Create a token with usage limit + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_update_limit".to_owned(), + Some(5), + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Update the usage limit + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .json(json!({ + "usage_limit": 10 + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // Verify usage limit was updated + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_update_limit", + "valid": true, + "usage_limit": 10, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + + // Now remove the usage limit + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + 
registration_token.id + )) + .bearer(&token) + .json(json!({ + "usage_limit": null + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // Verify usage limit was removed + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_update_limit", + "valid": true, + "usage_limit": null, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_update_multiple_fields(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + + // Create a token + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_update_multiple".to_owned(), + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Update both fields + let future_date = state.clock.now() + Duration::days(30); + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .json(json!({ + "expires_at": future_date, + "usage_limit": 20 + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // Both fields were updated + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": 
"test_update_multiple", + "valid": true, + "usage_limit": 20, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-02-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_update_no_fields(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + + // Create a token + let registration_token = repo + .user_registration_token() + .add( + &mut state.rng(), + &state.clock, + "test_update_none".to_owned(), + Some(5), + Some(state.clock.now() + Duration::days(30)), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Send empty update + let request = Request::put(format!( + "/api/admin/v1/user-registration-tokens/{}", + registration_token.id + )) + .bearer(&token) + .json(json!({})); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // It shouldn't have updated the token + insta::assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user-registration_token", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "token": "test_update_none", + "valid": true, + "usage_limit": 5, + "times_used": 0, + "created_at": "2022-01-16T14:40:00Z", + "last_used_at": null, + "expires_at": "2022-02-15T14:40:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn 
test_update_unknown_token(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Try to update a non-existent token + let request = + Request::put("/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081") + .bearer(&token) + .json(json!({ + "usage_limit": 5 + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + + assert_eq!( + body["errors"][0]["title"], + "Registration token with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/finish.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/finish.rs new file mode 100644 index 00000000..a50253f1 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/finish.rs @@ -0,0 +1,216 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserSession}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User session with ID {0} not found")] + NotFound(Ulid), + + #[error("User session with ID {0} is already finished")] + AlreadyFinished(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + Self::AlreadyFinished(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("finishUserSession") + .summary("Finish a user session") + .description( + "Calling this endpoint will finish the user session, preventing any further use.", + ) + .tag("user-session") + .response_with::<200, Json>, _>(|t| { + // Get the finished session sample + let [_, _, finished_session] = UserSession::samples(); + let id = finished_session.id(); + let response = SingleResponse::new( + finished_session, + format!("/api/admin/v1/user-sessions/{id}/finish"), + ); + t.description("User session was finished").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::AlreadyFinished(Ulid::nil())); + t.description("Session is already 
finished") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_sessions.finish", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let session = repo + .browser_session() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + // Check if the session is already finished + if session.finished_at.is_some() { + return Err(RouteError::AlreadyFinished(id)); + } + + // Finish the session + let session = repo.browser_session().finish(&clock, session).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + UserSession::from(session), + format!("/api/admin/v1/user-sessions/{id}/finish"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::Clock as _; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a user session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!("/api/admin/v1/user-sessions/{}/finish", session.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: 
serde_json::Value = response.json(); + + // The finished_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["finished_at"], + serde_json::json!(state.clock.now()) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_already_finished_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a user session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + + // Finish the session first + let session = repo + .browser_session() + .finish(&state.clock, session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Move the clock forward + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!("/api/admin/v1/user-sessions/{}/finish", session.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + format!("User session with ID {} is already finished", session.id) + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_finish_unknown_session(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = + Request::post("/api/admin/v1/user-sessions/01040G2081040G2081040G2081/finish") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + 
body["errors"][0]["title"], + "User session with ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/get.rs new file mode 100644 index 00000000..0a65c80c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/get.rs @@ -0,0 +1,138 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::UserSession, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User session ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUserSession") + .summary("Get a user session") + .tag("user-session") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] 
= UserSession::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("User session was found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User session was not found") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_sessions.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let session = repo + .browser_session() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical(UserSession::from( + session, + )))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision a user and a user session + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let session_id = session.id; + let request = Request::get(format!("/api/admin/v1/user-sessions/{session_id}")) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "data": { + "type": "user-session", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T14:40:00Z", + "finished_at": 
null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + "###); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/list.rs new file mode 100644 index 00000000..ad8a0598 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/list.rs @@ -0,0 +1,585 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{pagination::Page, user::BrowserSessionFilter}; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, UserSession}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum UserSessionStatus { + Active, + Finished, +} + +impl std::fmt::Display for UserSessionStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Active => write!(f, "active"), + Self::Finished => write!(f, "finished"), + } + } +} + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "UserSessionFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), 
rejection(RouteError))] +pub struct FilterParams { + /// Retrieve the items for the given user + #[serde(rename = "filter[user]")] + #[schemars(with = "Option")] + user: Option, + + /// Retrieve the items with the given status + /// + /// Defaults to retrieve all sessions, including finished ones. + /// + /// * `active`: Only retrieve active sessions + /// + /// * `finished`: Only retrieve finished sessions + #[serde(rename = "filter[status]")] + status: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(user) = self.user { + write!(f, "{sep}filter[user]={user}")?; + sep = '&'; + } + + if let Some(status) = self.status { + write!(f, "{sep}filter[status]={status}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + UserNotFound(Ulid), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UserNotFound(_) => StatusCode::NOT_FOUND, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listUserSessions") + .summary("List user sessions") + .description("Retrieve a list of user sessions (browser sessions). +Note that by default, all sessions, including finished ones are returned, with the oldest first. 
+Use the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.") + .tag("user-session") + .response_with::<200, Json>, _>(|t| { + let sessions = UserSession::samples(); + let pagination = mas_storage::Pagination::first(sessions.len()); + let page = Page { + edges: sessions + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + has_previous_page: false, + }; + + t.description("Paginated response of user sessions") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + UserSession::PATH, + )) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserNotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.user_sessions.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = UserSession::PATH); + let base = include_count.add_to_base(&base); + let filter = BrowserSessionFilter::default(); + + // Load the user from the filter + let user = if let Some(user_id) = params.user { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UserNotFound(user_id))?; + + Some(user) + } else { + None + }; + + let filter = match &user { + Some(user) => filter.for_user(user), + None => filter, + }; + + let filter = match params.status { + Some(UserSessionStatus::Active) => filter.active_only(), + Some(UserSessionStatus::Finished) => filter.finished_only(), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo + .browser_session() + .list(filter, pagination) + .await? 
+ .map(UserSession::from); + let count = repo.browser_session().count(filter).await?; + PaginatedResponse::for_page(page, pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo + .browser_session() + .list(filter, pagination) + .await? + .map(UserSession::from); + PaginatedResponse::for_page(page, pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.browser_session().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_user_session_list(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision two users, one user session for each, and finish one of them + let mut repo = state.repository().await.unwrap(); + let alice = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + state.clock.advance(Duration::minutes(1)); + + let bob = repo + .user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + + repo.browser_session() + .add(&mut rng, &state.clock, &alice, None) + .await + .unwrap(); + + let session = repo + .browser_session() + .add(&mut rng, &state.clock, &bob, None) + .await + .unwrap(); + state.clock.advance(Duration::minutes(1)); + repo.browser_session() + .finish(&state.clock, session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get("/api/admin/v1/user-sessions") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + 
assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user-session", + "id": "01FSHNB5309NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB5309NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + }, + { + "type": "user-session", + "id": "01FSHNB530KEPHYQQXW9XPTX6Z", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": "2022-01-16T14:42:00Z", + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB530KEPHYQQXW9XPTX6Z" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AJ6AC5HQ9X6H4RP4" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?page[first]=10", + "first": "/api/admin/v1/user-sessions?page[first]=10", + "last": "/api/admin/v1/user-sessions?page[last]=10" + } + } + "#); + + // Filter by user + let request = Request::get(format!( + "/api/admin/v1/user-sessions?filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-session", + "id": "01FSHNB5309NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB5309NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": 
"/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "first": "/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[first]=10", + "last": "/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&page[last]=10" + } + } + "#); + + // Filter by status (active) + let request = Request::get("/api/admin/v1/user-sessions?filter[status]=active") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-session", + "id": "01FSHNB5309NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB5309NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?filter[status]=active&page[first]=10", + "first": "/api/admin/v1/user-sessions?filter[status]=active&page[first]=10", + "last": "/api/admin/v1/user-sessions?filter[status]=active&page[last]=10" + } + } + "#); + + // Filter by status (finished) + let request = Request::get("/api/admin/v1/user-sessions?filter[status]=finished") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "data": [ + { + "type": "user-session", + "id": "01FSHNB530KEPHYQQXW9XPTX6Z", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": "2022-01-16T14:42:00Z", + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "user_agent": null, + "last_active_at": null, + "last_active_ip": 
null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB530KEPHYQQXW9XPTX6Z" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AJ6AC5HQ9X6H4RP4" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?filter[status]=finished&page[first]=10", + "first": "/api/admin/v1/user-sessions?filter[status]=finished&page[first]=10", + "last": "/api/admin/v1/user-sessions?filter[status]=finished&page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/user-sessions?count=false") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-session", + "id": "01FSHNB5309NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB5309NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + }, + { + "type": "user-session", + "id": "01FSHNB530KEPHYQQXW9XPTX6Z", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": "2022-01-16T14:42:00Z", + "user_id": "01FSHNB530AJ6AC5HQ9X6H4RP4", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB530KEPHYQQXW9XPTX6Z" + }, + "meta": { + "page": { + "cursor": "01FSHNB530AJ6AC5HQ9X6H4RP4" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?count=false&page[first]=10", + "first": "/api/admin/v1/user-sessions?count=false&page[first]=10", + "last": "/api/admin/v1/user-sessions?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/user-sessions?count=only") + .bearer(&token) + 
.empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r###" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/user-sessions?count=only" + } + } + "###); + + // Test count=false with filtering + let request = Request::get(format!( + "/api/admin/v1/user-sessions?count=false&filter[user]={}", + alice.id + )) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user-session", + "id": "01FSHNB5309NMZYX8MFYH578R9", + "attributes": { + "created_at": "2022-01-16T14:41:00Z", + "finished_at": null, + "user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/01FSHNB5309NMZYX8MFYH578R9" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "first": "/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[first]=10", + "last": "/api/admin/v1/user-sessions?filter[user]=01FSHN9AG0MZAA6S4AF7CTV32E&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = Request::get("/api/admin/v1/user-sessions?count=only&filter[status]=active") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/user-sessions?filter[status]=active&count=only" + } + } + "#); + } +} diff --git 
a/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/mod.rs new file mode 100644 index 00000000..db7b17ff --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/user_sessions/mod.rs @@ -0,0 +1,14 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod finish; +mod get; +mod list; + +pub use self::{ + finish::{doc as finish_doc, handler as finish}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/add.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/add.rs new file mode 100644 index 00000000..07e87fb4 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/add.rs @@ -0,0 +1,326 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::Arc; + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, extract::State, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_matrix::{HomeserverConnection, ProvisionRequest}; +use schemars::JsonSchema; +use serde::Deserialize; +use tracing::warn; + +use crate::{ + admin::{ + call_context::CallContext, + model::User, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +fn valid_username_character(c: char) -> bool { + c.is_ascii_lowercase() + || c.is_ascii_digit() + || c == '=' + || c == '_' + || c == '-' + || c == '.' 
+ || c == '/' + || c == '+' +} + +// XXX: this should be shared with the graphql handler +fn username_valid(username: &str) -> bool { + if username.is_empty() || username.len() > 255 { + return false; + } + + // Should not start with an underscore + if username.starts_with('_') { + return false; + } + + // Should only contain valid characters + if !username.chars().all(valid_username_character) { + return false; + } + + true +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error(transparent)] + Homeserver(anyhow::Error), + + #[error("Username is not valid")] + UsernameNotValid, + + #[error("User already exists")] + UserAlreadyExists, + + #[error("Username is reserved by the homeserver")] + UsernameReserved, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_) | Self::Homeserver(_)); + let status = match self { + Self::Internal(_) | Self::Homeserver(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::UsernameNotValid => StatusCode::BAD_REQUEST, + Self::UserAlreadyExists | Self::UsernameReserved => StatusCode::CONFLICT, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/users` endpoint +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "AddUserRequest")] +pub struct Request { + /// The username of the user to add. + username: String, + + /// Skip checking with the homeserver whether the username is available. + /// + /// Use this with caution! 
The main reason to use this, is when a user used + /// by an application service needs to exist in MAS to craft special + /// tokens (like with admin access) for them + #[serde(default)] + skip_homeserver_check: bool, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("createUser") + .summary("Create a new user") + .tag("user") + .response_with::<201, Json>, _>(|t| { + let [sample, ..] = User::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("User was created").example(response) + }) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UsernameNotValid); + t.description("Username is not valid").example(response) + }) + .response_with::<409, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UserAlreadyExists); + t.description("User already exists").example(response) + }) + .response_with::<409, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::UsernameReserved); + t.description("Username is reserved by the homeserver") + .example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.add", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + State(homeserver): State>, + Json(params): Json, +) -> Result<(StatusCode, Json>), RouteError> { + if repo.user().exists(¶ms.username).await? 
{ + return Err(RouteError::UserAlreadyExists); + } + + // Do some basic check on the username + if !username_valid(¶ms.username) { + return Err(RouteError::UsernameNotValid); + } + + // Ask the homeserver if the username is available + let homeserver_available = homeserver + .is_localpart_available(¶ms.username) + .await + .map_err(RouteError::Homeserver)?; + + if !homeserver_available { + if !params.skip_homeserver_check { + return Err(RouteError::UsernameReserved); + } + + // If we skipped the check, we still want to shout about it + warn!("Skipped homeserver check for username {}", params.username); + } + + let user = repo.user().add(&mut rng, &clock, params.username).await?; + + homeserver + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .map_err(RouteError::Homeserver)?; + + repo.save().await?; + + Ok(( + StatusCode::CREATED, + Json(SingleResponse::new_canonical(User::from(user))), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_matrix::HomeserverConnection; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_add_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "alice", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: serde_json::Value = response.json(); + assert_eq!(body["data"]["type"], "user"); + let id = body["data"]["id"].as_str().unwrap(); + assert_eq!(body["data"]["attributes"]["username"], "alice"); + + // Check that the user was created in the database + let mut repo = state.repository().await.unwrap(); + let user = repo + 
.user() + .lookup(id.parse().unwrap()) + .await + .unwrap() + .unwrap(); + + assert_eq!(user.username, "alice"); + + // Check that the user was created on the homeserver + let result = state.homeserver_connection.query_user("alice").await; + assert!(result.is_ok()); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_add_user_invalid_username(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "this is invalid", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + + let body: serde_json::Value = response.json(); + assert_eq!(body["errors"][0]["title"], "Username is not valid"); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_add_user_exists(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "alice", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: serde_json::Value = response.json(); + assert_eq!(body["data"]["type"], "user"); + assert_eq!(body["data"]["attributes"]["username"], "alice"); + + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "alice", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CONFLICT); + + let body: serde_json::Value = response.json(); + assert_eq!(body["errors"][0]["title"], "User already exists"); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_add_user_reserved(pool: PgPool) { + setup(); + let mut state = 
TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Reserve a username on the homeserver and try to add it + state.homeserver_connection.reserve_localpart("bob").await; + + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "bob", + })); + + let response = state.request(request).await; + + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "Username is reserved by the homeserver" + ); + + // But we can force it with the skip_homeserver_check flag + let request = Request::post("/api/admin/v1/users") + .bearer(&token) + .json(serde_json::json!({ + "username": "bob", + "skip_homeserver_check": true, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let body: serde_json::Value = response.json(); + let id = body["data"]["id"].as_str().unwrap(); + assert_eq!(body["data"]["attributes"]["username"], "bob"); + + // Check that the user was created in the database + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .lookup(id.parse().unwrap()) + .await + .unwrap() + .unwrap(); + + assert_eq!(user.username, "bob"); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/by_username.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/by_username.rs new file mode 100644 index 00000000..2ba12203 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/by_username.rs @@ -0,0 +1,83 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, extract::Path, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::User, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User with username {0:?} not found")] + NotFound(String), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +#[derive(Deserialize, JsonSchema)] +pub struct UsernamePathParam { + /// The username (localpart) of the user to get + username: String, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUserByUsername") + .summary("Get a user by its username (localpart)") + .tag("user") + .response_with::<200, Json>, _>(|t| { + let [sample, ..] = User::samples(); + let response = + SingleResponse::new(sample, "/api/admin/v1/users/by-username/alice".to_owned()); + t.description("User was found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound("alice".to_owned())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.by_username", skip_all)] +pub async fn handler( + CallContext { mut repo, .. 
}: CallContext, + Path(UsernamePathParam { username }): Path, +) -> Result>, RouteError> { + let self_path = format!("/api/admin/v1/users/by-username/{username}"); + let user = repo + .user() + .find_by_username(&username) + .await? + .ok_or(RouteError::NotFound(username))?; + + Ok(Json(SingleResponse::new(User::from(user), self_path))) +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/deactivate.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/deactivate.rs new file mode 100644 index 00000000..b963b73d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/deactivate.rs @@ -0,0 +1,324 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use mas_storage::queue::{DeactivateUserJob, QueueJobRepositoryExt as _}; +use schemars::JsonSchema; +use serde::Deserialize; +use tracing::info; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => 
StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/users/:id/deactivate` endpoint +#[derive(Default, Deserialize, JsonSchema)] +#[serde(rename = "DeactivateUserRequest")] +pub struct Request { + /// Whether to skip requesting the homeserver to GDPR-erase the user upon + /// deactivation. + #[serde(default)] + skip_erase: bool, +} + +pub fn doc(mut operation: TransformOperation) -> TransformOperation { + operation + .inner_mut() + .request_body + .as_mut() + .unwrap() + .as_item_mut() + .unwrap() + .required = false; + + operation + .id("deactivateUser") + .summary("Deactivate a user") + .description( + "Calling this endpoint will deactivate the user, preventing them from doing any action. +This invalidates any existing session, and will ask the homeserver to make them leave all rooms.", + ) + .tag("user") + .response_with::<200, Json>, _>(|t| { + // In the samples, the third user is the one locked + let [_alice, _bob, charlie, ..] = User::samples(); + let id = charlie.id(); + let response = + SingleResponse::new(charlie, format!("/api/admin/v1/users/{id}/deactivate")); + t.description("User was deactivated").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User ID not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.deactivate", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + id: UlidPathParam, + body: Option>, +) -> Result>, RouteError> { + let Json(params) = body.unwrap_or_default(); + let id = *id; + let user = repo + .user() + .lookup(id) + .await? 
+ .ok_or(RouteError::NotFound(id))?; + + let user = repo.user().deactivate(&clock, user).await?; + + info!(%user.id, "Scheduling deactivation of user"); + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + DeactivateUserJob::new(&user, !params.skip_erase), + ) + .await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + User::from(user), + format!("/api/admin/v1/users/{id}/deactivate"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use insta::{allow_duplicates, assert_json_snapshot}; + use mas_data_model::Clock; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::{PgPool, types::Json}; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + async fn test_deactivate_user_helper(pool: PgPool, skip_erase: Option) { + setup(); + let mut state = TestState::from_pool(pool.clone()).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = + Request::post(format!("/api/admin/v1/users/{}/deactivate", user.id)).bearer(&token); + let request = match skip_erase { + None => request.empty(), + Some(skip_erase) => request.json(serde_json::json!({ + "skip_erase": skip_erase, + })), + }; + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The deactivated_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["deactivated_at"], + serde_json::json!(state.clock.now()) + ); + + // Deactivating the user should not lock it + assert_eq!( + body["data"]["attributes"]["locked_at"], + serde_json::Value::Null + ); + + // It should have scheduled a deactivation job for the user + // XXX: we don't have a good way to look 
for the deactivation job + let job: Json = sqlx::query_scalar( + "SELECT payload FROM queue_jobs WHERE queue_name = 'deactivate-user'", + ) + .fetch_one(&pool) + .await + .expect("Deactivation job to be scheduled"); + assert_eq!(job["user_id"], serde_json::json!(user.id)); + assert_eq!( + job["hs_erase"], + serde_json::json!(!skip_erase.unwrap_or(false)) + ); + + // Make sure to run the jobs in the queue + state.run_jobs_in_queue().await; + + let request = Request::get(format!("/api/admin/v1/users/{}", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + allow_duplicates!(assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "username": "alice", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": "2022-01-16T14:40:00Z", + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#)); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_deactivate_user(pool: PgPool) { + test_deactivate_user_helper(pool, Option::None).await; + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_deactivate_user_skip_erase(pool: PgPool) { + test_deactivate_user_helper(pool, Option::Some(true)).await; + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_deactivate_locked_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool.clone()).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + let user = repo.user().lock(&state.clock, 
user).await.unwrap(); + repo.save().await.unwrap(); + + // Move the clock forward to make sure the locked_at timestamp doesn't change + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!("/api/admin/v1/users/{}/deactivate", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The deactivated_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["deactivated_at"], + serde_json::json!(state.clock.now()) + ); + + // The deactivated_at timestamp should be different from the locked_at timestamp + assert_ne!( + body["data"]["attributes"]["deactivated_at"], + body["data"]["attributes"]["locked_at"], + ); + + // Make sure to run the jobs in the queue + state.run_jobs_in_queue().await; + + let request = Request::get(format!("/api/admin/v1/users/{}", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_json_snapshot!(body, @r#" + { + "data": { + "type": "user", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "username": "alice", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": "2022-01-16T14:40:00Z", + "deactivated_at": "2022-01-16T14:41:00Z", + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + } + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + "#); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_deactivate_unknown_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/deactivate") + .bearer(&token) + 
.empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "User ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/get.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/get.rs new file mode 100644 index 00000000..afc17701 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/get.rs @@ -0,0 +1,75 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::User, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("getUser") + .summary("Get a user") + .tag("user") + 
.response_with::<200, Json>, _>(|t| { + let [sample, ..] = User::samples(); + let response = SingleResponse::new_canonical(sample); + t.description("User was found").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.get", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let user = repo + .user() + .lookup(*id) + .await? + .ok_or(RouteError::NotFound(*id))?; + + Ok(Json(SingleResponse::new_canonical(User::from(user)))) +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/list.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/list.rs new file mode 100644 index 00000000..65375402 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/list.rs @@ -0,0 +1,431 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use axum_extra::extract::{Query, QueryRejection}; +use axum_macros::FromRequestParts; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_storage::{Page, user::UserFilter}; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::{IncludeCount, Pagination}, + response::{ErrorResponse, PaginatedResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Deserialize, JsonSchema, Clone, Copy)] +#[serde(rename_all = "snake_case")] +enum UserStatus { + Active, + Locked, + Deactivated, +} + +impl std::fmt::Display for UserStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Active => write!(f, "active"), + Self::Locked => write!(f, "locked"), + Self::Deactivated => write!(f, "deactivated"), + } + } +} + +#[derive(FromRequestParts, Deserialize, JsonSchema, OperationIo)] +#[serde(rename = "UserFilter")] +#[aide(input_with = "Query")] +#[from_request(via(Query), rejection(RouteError))] +pub struct FilterParams { + /// Retrieve users with (or without) the `admin` flag set + #[serde(rename = "filter[admin]")] + admin: Option, + + /// Retrieve users with (or without) the `legacy_guest` flag set + #[serde(rename = "filter[legacy-guest]")] + legacy_guest: Option, + + /// Retrieve users where the username matches contains the given string + /// + /// Note that this doesn't change the ordering of the result, which are + /// still ordered by ID. + #[serde(rename = "filter[search]")] + search: Option, + + /// Retrieve the items with the given status + /// + /// Defaults to retrieve all users, including locked ones. 
+ /// + /// * `active`: Only retrieve active users + /// + /// * `locked`: Only retrieve locked users (includes deactivated users) + /// + /// * `deactivated`: Only retrieve deactivated users + #[serde(rename = "filter[status]")] + status: Option, +} + +impl std::fmt::Display for FilterParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut sep = '?'; + + if let Some(admin) = self.admin { + write!(f, "{sep}filter[admin]={admin}")?; + sep = '&'; + } + if let Some(legacy_guest) = self.legacy_guest { + write!(f, "{sep}filter[legacy-guest]={legacy_guest}")?; + sep = '&'; + } + if let Some(search) = &self.search { + write!(f, "{sep}filter[search]={search}")?; + sep = '&'; + } + if let Some(status) = self.status { + write!(f, "{sep}filter[status]={status}")?; + sep = '&'; + } + + let _ = sep; + Ok(()) + } +} + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Invalid filter parameters")] + InvalidFilter(#[from] QueryRejection), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::InvalidFilter(_) => StatusCode::BAD_REQUEST, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("listUsers") + .summary("List users") + .tag("user") + .response_with::<200, Json>, _>(|t| { + let users = User::samples(); + let pagination = mas_storage::Pagination::first(users.len()); + let page = Page { + edges: users + .into_iter() + .map(|node| mas_storage::pagination::Edge { + cursor: node.id(), + node, + }) + .collect(), + has_next_page: true, + 
has_previous_page: false, + }; + + t.description("Paginated response of users") + .example(PaginatedResponse::for_page( + page, + pagination, + Some(42), + User::PATH, + )) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.list", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + Pagination(pagination, include_count): Pagination, + params: FilterParams, +) -> Result>, RouteError> { + let base = format!("{path}{params}", path = User::PATH); + let base = include_count.add_to_base(&base); + let filter = UserFilter::default(); + + let filter = match params.admin { + Some(true) => filter.can_request_admin_only(), + Some(false) => filter.cannot_request_admin_only(), + None => filter, + }; + + let filter = match params.legacy_guest { + Some(true) => filter.guest_only(), + Some(false) => filter.non_guest_only(), + None => filter, + }; + + let filter = match params.search.as_deref() { + Some(search) => filter.matching_search(search), + None => filter, + }; + + let filter = match params.status { + Some(UserStatus::Active) => filter.active_only(), + Some(UserStatus::Locked) => filter.locked_only(), + Some(UserStatus::Deactivated) => filter.deactivated_only(), + None => filter, + }; + + let response = match include_count { + IncludeCount::True => { + let page = repo.user().list(filter, pagination).await?; + let count = repo.user().count(filter).await?; + PaginatedResponse::for_page(page.map(User::from), pagination, Some(count), &base) + } + IncludeCount::False => { + let page = repo.user().list(filter, pagination).await?; + PaginatedResponse::for_page(page.map(User::from), pagination, None, &base) + } + IncludeCount::Only => { + let count = repo.user().count(filter).await?; + PaginatedResponse::for_count_only(count, &base) + } + }; + + Ok(Json(response)) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + 
#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_list_users(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + let mut rng = state.rng(); + + // Provision two users + let mut repo = state.repository().await.unwrap(); + repo.user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.user() + .add(&mut rng, &state.clock, "bob".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Test default behavior (count=true) + let request = Request::get("/api/admin/v1/users").bearer(&token).empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "username": "bob", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + }, + { + "type": "user", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "username": "alice", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/users?page[first]=10", + "first": "/api/admin/v1/users?page[first]=10", + "last": "/api/admin/v1/users?page[last]=10" + } + } + "#); + + // Test count=false + let request = Request::get("/api/admin/v1/users?count=false") + .bearer(&token) + .empty(); + let response = 
state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "username": "bob", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + }, + { + "type": "user", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": { + "username": "alice", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/users?count=false&page[first]=10", + "first": "/api/admin/v1/users?count=false&page[first]=10", + "last": "/api/admin/v1/users?count=false&page[last]=10" + } + } + "#); + + // Test count=only + let request = Request::get("/api/admin/v1/users?count=only") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "meta": { + "count": 2 + }, + "links": { + "self": "/api/admin/v1/users?count=only" + } + } + "###); + + // Test count=false with filtering + let request = Request::get("/api/admin/v1/users?count=false&filter[search]=alice") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "data": [ + { + "type": "user", + "id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "attributes": 
{ + "username": "alice", + "created_at": "2022-01-16T14:40:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01FSHN9AG0MZAA6S4AF7CTV32E" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0MZAA6S4AF7CTV32E" + } + } + } + ], + "links": { + "self": "/api/admin/v1/users?filter[search]=alice&count=false&page[first]=10", + "first": "/api/admin/v1/users?filter[search]=alice&count=false&page[first]=10", + "last": "/api/admin/v1/users?filter[search]=alice&count=false&page[last]=10" + } + } + "#); + + // Test count=only with filtering + let request = Request::get("/api/admin/v1/users?count=only&filter[search]=alice") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "meta": { + "count": 1 + }, + "links": { + "self": "/api/admin/v1/users?filter[search]=alice&count=only" + } + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/lock.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/lock.rs new file mode 100644 index 00000000..6d6ccfcf --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/lock.rs @@ -0,0 +1,182 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("lockUser") + .summary("Lock a user") + .description("Calling this endpoint will lock the user, preventing them from doing any action. +This DOES NOT invalidate any existing session, meaning that all their existing sessions will work again as soon as they get unlocked.") + .tag("user") + .response_with::<200, Json>, _>(|t| { + // In the samples, the third user is the one locked + let [_alice, _bob, charlie, ..] 
= User::samples(); + let id = charlie.id(); + let response = SingleResponse::new(charlie, format!("/api/admin/v1/users/{id}/lock")); + t.description("User was locked").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User ID not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.lock", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let user = repo + .user() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + let user = repo.user().lock(&clock, user).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + User::from(user), + format!("/api/admin/v1/users/{id}/lock"), + ))) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::Clock; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_lock_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!("/api/admin/v1/users/{}/lock", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The locked_at timestamp should be the same as the current time + assert_eq!( + body["data"]["attributes"]["locked_at"], + serde_json::json!(state.clock.now()) + ); + } + 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_lock_user_twice(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + let user = repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + // Move the clock forward to make sure the locked_at timestamp doesn't change + state.clock.advance(Duration::try_minutes(1).unwrap()); + + let request = Request::post(format!("/api/admin/v1/users/{}/lock", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The locked_at timestamp should be different from the current time + assert_ne!( + body["data"]["attributes"]["locked_at"], + serde_json::json!(state.clock.now()) + ); + assert_ne!( + body["data"]["attributes"]["locked_at"], + serde_json::Value::Null + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_lock_unknown_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/lock") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "User ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/mod.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/mod.rs new file mode 100644 index 00000000..37484b75 --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/admin/v1/users/mod.rs @@ -0,0 +1,29 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod add; +mod by_username; +mod deactivate; +mod get; +mod list; +mod lock; +mod reactivate; +mod set_admin; +mod set_password; +mod unlock; + +pub use self::{ + add::{doc as add_doc, handler as add}, + by_username::{doc as by_username_doc, handler as by_username}, + deactivate::{doc as deactivate_doc, handler as deactivate}, + get::{doc as get_doc, handler as get}, + list::{doc as list_doc, handler as list}, + lock::{doc as lock_doc, handler as lock}, + reactivate::{doc as reactivate_doc, handler as reactivate}, + set_admin::{doc as set_admin_doc, handler as set_admin}, + set_password::{doc as set_password_doc, handler as set_password}, + unlock::{doc as unlock_doc, handler as unlock}, +}; diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/reactivate.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/reactivate.rs new file mode 100644 index 00000000..835ef0b4 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/reactivate.rs @@ -0,0 +1,222 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, extract::State, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_matrix::HomeserverConnection; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error(transparent)] + Homeserver(anyhow::Error), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_) | Self::Homeserver(_)); + let status = match self { + Self::Internal(_) | Self::Homeserver(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("reactivateUser") + .summary("Reactivate a user") + .description("Calling this endpoint will reactivate a deactivated user. +This DOES NOT unlock a locked user, which is still prevented from doing any action until it is explicitly unlocked.") + .tag("user") + .response_with::<200, Json>, _>(|t| { + // In the samples, the third user is the one locked + let [sample, ..] 
= User::samples(); + let id = sample.id(); + let response = SingleResponse::new(sample, format!("/api/admin/v1/users/{id}/reactivate")); + t.description("User was reactivated").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User ID not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.reactivate", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + State(homeserver): State>, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let user = repo + .user() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + // Call the homeserver synchronously to reactivate the user + homeserver + .reactivate_user(&user.username) + .await + .map_err(RouteError::Homeserver)?; + + // Now reactivate the user in our database + let user = repo.user().reactivate(user).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + User::from(user), + format!("/api/admin/v1/users/{id}/reactivate"), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::Clock; + use mas_matrix::{HomeserverConnection, ProvisionRequest}; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_reactivate_deactivated_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool.clone()).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + let user = repo.user().lock(&state.clock, user).await.unwrap(); + let user = repo.user().deactivate(&state.clock, user).await.unwrap(); + 
repo.save().await.unwrap(); + + // Provision and immediately deactivate the user on the homeserver, + // because this endpoint will try to reactivate it + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + state + .homeserver_connection + .delete_user(&user.username, true) + .await + .unwrap(); + + // The user should be deactivated on the homeserver + let mx_user = state + .homeserver_connection + .query_user(&user.username) + .await + .unwrap(); + assert!(mx_user.deactivated); + + let request = Request::post(format!("/api/admin/v1/users/{}/reactivate", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + // The user should remain locked after being reactivated + assert_eq!( + body["data"]["attributes"]["locked_at"], + serde_json::json!(state.clock.now()) + ); + assert_eq!( + body["data"]["attributes"]["deactivated_at"], + serde_json::Value::Null, + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_reactivate_active_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool.clone()).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Provision the user on the homeserver + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + let request = Request::post(format!("/api/admin/v1/users/{}/reactivate", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_eq!( + body["data"]["attributes"]["locked_at"], 
+ serde_json::Value::Null + ); + assert_eq!( + body["data"]["attributes"]["deactivated_at"], + serde_json::Value::Null + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_reactivate_unknown_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/reactivate") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "User ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_admin.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_admin.rs new file mode 100644 index 00000000..455fa798 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_admin.rs @@ -0,0 +1,161 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/users/:id/set-admin` endpoint +#[derive(Deserialize, JsonSchema)] +#[serde(rename = "UserSetAdminRequest")] +pub struct Request { + /// Whether the user can request admin privileges. + admin: bool, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("userSetAdmin") + .summary("Set whether a user can request admin") + .description("Calling this endpoint will not have any effect on existing sessions, meaning that their existing sessions will keep admin access if they were granted it.") + .tag("user") + .response_with::<200, Json>, _>(|t| { + // In the samples, the second user is the one which can request admin + let [_alice, bob, ..] 
= User::samples(); + let id = bob.id(); + let response = SingleResponse::new(bob, format!("/api/admin/v1/users/{id}/set-admin")); + t.description("User had admin privileges set").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User ID not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.set_admin", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, + Json(params): Json, +) -> Result>, RouteError> { + let id = *id; + let user = repo + .user() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + let user = repo + .user() + .set_can_request_admin(user, params.admin) + .await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + User::from(user), + format!("/api/admin/v1/users/{id}/set-admin"), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_change_can_request_admin(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let request = Request::post(format!("/api/admin/v1/users/{}/set-admin", user.id)) + .bearer(&token) + .json(serde_json::json!({ + "admin": true, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_eq!(body["data"]["attributes"]["admin"], true); + + // Look at the state from the repository 
+ let mut repo = state.repository().await.unwrap(); + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(user.can_request_admin); + repo.save().await.unwrap(); + + // Flip it back + let request = Request::post(format!("/api/admin/v1/users/{}/set-admin", user.id)) + .bearer(&token) + .json(serde_json::json!({ + "admin": false, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_eq!(body["data"]["attributes"]["admin"], false); + + // Look at the state from the repository + let mut repo = state.repository().await.unwrap(); + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(!user.can_request_admin); + repo.save().await.unwrap(); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_password.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_password.rs new file mode 100644 index 00000000..7e9365f7 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/set_password.rs @@ -0,0 +1,298 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::{NoApi, OperationIo, transform::TransformOperation}; +use axum::{Json, extract::State, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::BoxRng; +use schemars::JsonSchema; +use serde::Deserialize; +use ulid::Ulid; +use zeroize::Zeroizing; + +use crate::{ + admin::{call_context::CallContext, params::UlidPathParam, response::ErrorResponse}, + impl_from_error_for_route, + passwords::PasswordManager, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Password is too weak")] + PasswordTooWeak, + + #[error("Password auth is disabled")] + PasswordAuthDisabled, + + #[error("Password hashing failed")] + Password(#[source] anyhow::Error), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_) | Self::Password(_)); + let status = match self { + Self::Internal(_) | Self::Password(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::PasswordAuthDisabled => StatusCode::FORBIDDEN, + Self::PasswordTooWeak => StatusCode::BAD_REQUEST, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +/// # JSON payload for the `POST /api/admin/v1/users/:id/set-password` endpoint +#[derive(Deserialize, JsonSchema)] +#[schemars(rename = "SetUserPasswordRequest")] +pub struct Request { + /// The password to set for the user + #[schemars(example = &"hunter2")] + password: String, + + /// Skip the password complexity check + skip_password_check: Option, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("setUserPassword") + .summary("Set the password for a user") 
+ .tag("user") + .response_with::<204, (), _>(|t| t.description("Password was set")) + .response_with::<400, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::PasswordTooWeak); + t.description("Password is too weak").example(response) + }) + .response_with::<403, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::PasswordAuthDisabled); + t.description("Password auth is disabled in the server configuration") + .example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User was not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.set_password", skip_all)] +pub async fn handler( + CallContext { + mut repo, clock, .. + }: CallContext, + NoApi(mut rng): NoApi, + State(password_manager): State, + id: UlidPathParam, + Json(params): Json, +) -> Result { + if !password_manager.is_enabled() { + return Err(RouteError::PasswordAuthDisabled); + } + + let user = repo + .user() + .lookup(*id) + .await? 
+ .ok_or(RouteError::NotFound(*id))?; + + let skip_password_check = params.skip_password_check.unwrap_or(false); + tracing::info!(skip_password_check, "skip_password_check"); + if !skip_password_check + && !password_manager + .is_password_complex_enough(¶ms.password) + .unwrap_or(false) + { + return Err(RouteError::PasswordTooWeak); + } + + let password = Zeroizing::new(params.password); + let (version, hashed_password) = password_manager + .hash(&mut rng, password) + .await + .map_err(RouteError::Password)?; + + repo.user_password() + .add(&mut rng, &clock, &user, version, hashed_password, None) + .await?; + + repo.save().await?; + + Ok(StatusCode::NO_CONTENT) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_storage::{RepositoryAccess, user::UserPasswordRepository}; + use sqlx::PgPool; + use zeroize::Zeroizing; + + use crate::{ + passwords::{PasswordManager, PasswordVerificationResult}, + test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}, + }; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_set_password(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + // Double-check that the user doesn't have a password + let user_password = repo.user_password().active(&user).await.unwrap(); + assert!(user_password.is_none()); + + repo.save().await.unwrap(); + + let user_id = user.id; + + // Set the password through the API + let request = Request::post(format!("/api/admin/v1/users/{user_id}/set-password")) + .bearer(&token) + .json(serde_json::json!({ + "password": "this is a good enough password", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::NO_CONTENT); + + // Check that the user now 
has a password + let mut repo = state.repository().await.unwrap(); + let user_password = repo.user_password().active(&user).await.unwrap().unwrap(); + let password = Zeroizing::new(String::from("this is a good enough password")); + let res = state + .password_manager + .verify( + user_password.version, + password, + user_password.hashed_password, + ) + .await + .unwrap(); + assert_eq!(res, PasswordVerificationResult::Success(())); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_weak_password(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Create a user + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + let user_id = user.id; + + // Set a weak password through the API + let request = Request::post(format!("/api/admin/v1/users/{user_id}/set-password")) + .bearer(&token) + .json(serde_json::json!({ + "password": "password", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + + // Check that the user still has a password + let mut repo = state.repository().await.unwrap(); + let user_password = repo.user_password().active(&user).await.unwrap(); + assert!(user_password.is_none()); + repo.save().await.unwrap(); + + // Now try with the skip_password_check flag + let request = Request::post(format!("/api/admin/v1/users/{user_id}/set-password")) + .bearer(&token) + .json(serde_json::json!({ + "password": "password", + "skip_password_check": true, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::NO_CONTENT); + + // Check that the user now has a password + let mut repo = state.repository().await.unwrap(); + let user_password = repo.user_password().active(&user).await.unwrap().unwrap(); + let password = 
Zeroizing::new("password".to_owned()); + let res = state + .password_manager + .verify( + user_password.version, + password, + user_password.hashed_password, + ) + .await + .unwrap(); + assert_eq!(res, PasswordVerificationResult::Success(())); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unknown_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + // Set the password through the API + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/set-password") + .bearer(&token) + .json(serde_json::json!({ + "password": "this is a good enough password", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "User ID 01040G2081040G2081040G2081 not found" + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_disabled(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + state.password_manager = PasswordManager::disabled(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/set-password") + .bearer(&token) + .json(serde_json::json!({ + "password": "hunter2", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let body: serde_json::Value = response.json(); + assert_eq!(body["errors"][0]["title"], "Password auth is disabled"); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/users/unlock.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/users/unlock.rs new file mode 100644 index 00000000..72987a9f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/users/unlock.rs @@ -0,0 +1,213 @@ +// Copyright 2024, 
2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use aide::{OperationIo, transform::TransformOperation}; +use axum::{Json, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use ulid::Ulid; + +use crate::{ + admin::{ + call_context::CallContext, + model::{Resource, User}, + params::UlidPathParam, + response::{ErrorResponse, SingleResponse}, + }, + impl_from_error_for_route, +}; + +#[derive(Debug, thiserror::Error, OperationIo)] +#[aide(output_with = "Json")] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("User ID {0} not found")] + NotFound(Ulid), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let error = ErrorResponse::from_error(&self); + let sentry_event_id = record_error!(self, Self::Internal(_)); + let status = match self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + (status, sentry_event_id, Json(error)).into_response() + } +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("unlockUser") + .summary("Unlock a user") + .description("Calling this endpoint will lift restrictions on user actions that had been imposed by locking. +This DOES NOT reactivate a deactivated user, which will remain unavailable until it is explicitly reactivated.") + .tag("user") + .response_with::<200, Json>, _>(|t| { + // Use the first sample user for the example response + let [sample, ..]
= User::samples(); + let id = sample.id(); + let response = SingleResponse::new(sample, format!("/api/admin/v1/users/{id}/unlock")); + t.description("User was unlocked").example(response) + }) + .response_with::<404, RouteError, _>(|t| { + let response = ErrorResponse::from_error(&RouteError::NotFound(Ulid::nil())); + t.description("User ID not found").example(response) + }) +} + +#[tracing::instrument(name = "handler.admin.v1.users.unlock", skip_all)] +pub async fn handler( + CallContext { mut repo, .. }: CallContext, + id: UlidPathParam, +) -> Result>, RouteError> { + let id = *id; + let user = repo + .user() + .lookup(id) + .await? + .ok_or(RouteError::NotFound(id))?; + + let user = repo.user().unlock(user).await?; + + repo.save().await?; + + Ok(Json(SingleResponse::new( + User::from(user), + format!("/api/admin/v1/users/{id}/unlock"), + ))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_data_model::Clock; + use mas_matrix::{HomeserverConnection, ProvisionRequest}; + use mas_storage::{RepositoryAccess, user::UserRepository}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unlock_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + let user = repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + // Also provision the user on the homeserver, because this endpoint will try to + // reactivate it + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + let request = Request::post(format!("/api/admin/v1/users/{}/unlock", user.id)) + .bearer(&token) + 
.empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_eq!( + body["data"]["attributes"]["locked_at"], + serde_json::Value::Null + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unlock_deactivated_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + let user = repo.user().lock(&state.clock, user).await.unwrap(); + let user = repo.user().deactivate(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + // Provision the user on the homeserver + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + // but then deactivate it + state + .homeserver_connection + .delete_user(&user.username, true) + .await + .unwrap(); + + // The user should be deactivated on the homeserver + let mx_user = state + .homeserver_connection + .query_user(&user.username) + .await + .unwrap(); + assert!(mx_user.deactivated); + + let request = Request::post(format!("/api/admin/v1/users/{}/unlock", user.id)) + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + assert_eq!( + body["data"]["attributes"]["locked_at"], + serde_json::Value::Null + ); + // The user should remain deactivated + assert_eq!( + body["data"]["attributes"]["deactivated_at"], + serde_json::json!(state.clock.now()) + ); + let mx_user = state + .homeserver_connection + .query_user(&user.username) + .await + .unwrap(); + assert!(mx_user.deactivated); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn 
test_lock_unknown_user(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::post("/api/admin/v1/users/01040G2081040G2081040G2081/unlock") + .bearer(&token) + .empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::NOT_FOUND); + let body: serde_json::Value = response.json(); + assert_eq!( + body["errors"][0]["title"], + "User ID 01040G2081040G2081040G2081 not found" + ); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/admin/v1/version.rs b/matrix-authentication-service/crates/handlers/src/admin/v1/version.rs new file mode 100644 index 00000000..2fe53940 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/admin/v1/version.rs @@ -0,0 +1,62 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use aide::transform::TransformOperation; +use axum::{Json, extract::State}; +use mas_data_model::AppVersion; +use schemars::JsonSchema; +use serde::Serialize; + +use crate::admin::call_context::CallContext; + +#[derive(Serialize, JsonSchema)] +pub struct Version { + /// The semver version of the app + pub version: &'static str, +} + +pub fn doc(operation: TransformOperation) -> TransformOperation { + operation + .id("version") + .tag("server") + .summary("Get the version currently running") + .response_with::<200, Json, _>(|t| t.example(Version { version: "v1.0.0" })) +} + +#[tracing::instrument(name = "handler.admin.v1.version", skip_all)] +pub async fn handler( + _: CallContext, + State(AppVersion(version)): State, +) -> Json { + Json(Version { version }) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use insta::assert_json_snapshot; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_version(pool: PgPool) { + setup(); + let mut state = TestState::from_pool(pool).await.unwrap(); + let token = state.token_with_scope("urn:mas:admin").await; + + let request = Request::get("/api/admin/v1/version").bearer(&token).empty(); + + let response = state.request(request).await; + + assert_eq!(response.status(), StatusCode::OK); + let body: serde_json::Value = response.json(); + assert_json_snapshot!(body, @r#" + { + "version": "v0.0.0-test" + } + "#); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/bin/api-schema.rs b/matrix-authentication-service/crates/handlers/src/bin/api-schema.rs new file mode 100644 index 00000000..1b73c05c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/bin/api-schema.rs @@ -0,0 +1,88 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C.
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![forbid(unsafe_code)] +#![deny( + clippy::all, + clippy::str_to_string, + rustdoc::broken_intra_doc_links, + clippy::future_not_send +)] +#![warn(clippy::pedantic)] + +use std::{io::Write, sync::Arc}; + +use aide::openapi::{Server, ServerVariable}; +use indexmap::IndexMap; + +/// This is a dummy state, it should never be used. +/// +/// We use it to generate the API schema, which doesn't execute any request. +#[derive(Clone)] +struct DummyState; + +macro_rules! impl_from_request_parts { + ($type:ty) => { + impl axum::extract::FromRequestParts for $type { + type Rejection = std::convert::Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + _state: &DummyState, + ) -> Result { + unimplemented!("This is a dummy state, it should never be used") + } + } + }; +} + +macro_rules! impl_from_ref { + ($type:ty) => { + impl axum::extract::FromRef for $type { + fn from_ref(_input: &DummyState) -> Self { + unimplemented!("This is a dummy state, it should never be used") + } + } + }; +} + +impl_from_request_parts!(mas_storage::BoxRepository); +impl_from_request_parts!(mas_data_model::BoxClock); +impl_from_request_parts!(mas_data_model::BoxRng); +impl_from_request_parts!(mas_handlers::BoundActivityTracker); +impl_from_ref!(mas_router::UrlBuilder); +impl_from_ref!(mas_templates::Templates); +impl_from_ref!(Arc); +impl_from_ref!(mas_keystore::Keystore); +impl_from_ref!(mas_handlers::passwords::PasswordManager); +impl_from_ref!(Arc); +impl_from_ref!(mas_data_model::SiteConfig); +impl_from_ref!(mas_data_model::AppVersion); + +fn main() -> Result<(), Box> { + let (mut api, _) = mas_handlers::admin_api_router::(); + + // Set the server list to a configurable base URL + api.servers = vec![Server { + url: "{base}".to_owned(), + variables: IndexMap::from([( + "base".to_owned(), + ServerVariable { + 
default: "/".to_owned(), + ..ServerVariable::default() + }, + )]), + ..Server::default() + }]; + + let mut stdout = std::io::stdout(); + serde_json::to_writer_pretty(&mut stdout, &api)?; + + // Make sure we end with a newline + stdout.write_all(b"\n")?; + + Ok(()) +} diff --git a/matrix-authentication-service/crates/handlers/src/bin/graphql-schema.rs b/matrix-authentication-service/crates/handlers/src/bin/graphql-schema.rs new file mode 100644 index 00000000..45bdcc17 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/bin/graphql-schema.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![forbid(unsafe_code)] +#![deny( + clippy::all, + clippy::str_to_string, + rustdoc::broken_intra_doc_links, + clippy::future_not_send +)] +#![warn(clippy::pedantic)] + +fn main() { + let schema = mas_handlers::graphql_schema_builder().finish(); + println!("{}", schema.sdl()); +} diff --git a/matrix-authentication-service/crates/handlers/src/captcha.rs b/matrix-authentication-service/crates/handlers/src/captcha.rs new file mode 100644 index 00000000..c206df5c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/captcha.rs @@ -0,0 +1,257 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use mas_data_model::{CaptchaConfig, CaptchaService}; +use mas_http::RequestBuilderExt as _; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::BoundActivityTracker; + +// https://developers.google.com/recaptcha/docs/verify#api_request +const RECAPTCHA_VERIFY_URL: &str = "https://www.google.com/recaptcha/api/siteverify"; + +// https://docs.hcaptcha.com/#verify-the-user-response-server-side +const HCAPTCHA_VERIFY_URL: &str = "https://api.hcaptcha.com/siteverify"; + +// https://developers.cloudflare.com/turnstile/get-started/server-side-validation/ +const CF_TURNSTILE_VERIFY_URL: &str = "https://challenges.cloudflare.com/turnstile/v0/siteverify"; + +#[derive(Debug, Error)] +pub enum Error { + #[error("A CAPTCHA response was expected, but none was provided")] + MissingCaptchaResponse, + + #[error("A CAPTCHA response was provided, but no CAPTCHA provider is configured")] + NoCaptchaConfigured, + + #[error("The CAPTCHA response provided is not valid for the configured service")] + CaptchaResponseMismatch, + + #[error("The CAPTCHA response provided is invalid: {0:?}")] + InvalidCaptcha(Vec), + + #[error("The CAPTCHA provider returned an invalid response")] + InvalidResponse, + + #[error( + "The hostname in the CAPTCHA response ({got:?}) does not match the site hostname ({expected:?})" + )] + HostnameMismatch { expected: String, got: String }, + + #[error("The CAPTCHA provider returned an error")] + RequestFailed(#[from] reqwest::Error), +} + +#[allow(clippy::struct_field_names)] +#[derive(Debug, Deserialize, Default)] +#[serde(rename_all = "kebab-case")] +pub struct Form { + g_recaptcha_response: Option, + h_captcha_response: Option, + cf_turnstile_response: Option, +} + +#[derive(Debug, Serialize)] +struct VerificationRequest<'a> { + secret: &'a str, + response: &'a str, + remoteip: Option, +} + +#[derive(Debug, Deserialize)] +struct VerificationResponse { + success: bool, + #[serde(rename = "error-codes")] + 
error_codes: Option>, + + challenge_ts: Option, + hostname: Option, +} + +#[derive(Debug, Deserialize, Clone, Copy)] +#[serde(rename_all = "kebab-case")] +pub enum ErrorCode { + /// The secret parameter is missing. + /// + /// Used by Cloudflare Turnstile, hCaptcha, reCAPTCHA + MissingInputSecret, + + /// The secret parameter is invalid or malformed. + /// + /// Used by Cloudflare Turnstile, hCaptcha, reCAPTCHA + InvalidInputSecret, + + /// The response parameter is missing. + /// + /// Used by Cloudflare Turnstile, hCaptcha, reCAPTCHA + MissingInputResponse, + + /// The response parameter is invalid or malformed. + /// + /// Used by Cloudflare Turnstile, hCaptcha, reCAPTCHA + InvalidInputResponse, + + /// The widget ID extracted from the parsed site secret key was invalid or + /// did not exist. + /// + /// Used by Cloudflare Turnstile + InvalidWidgetId, + + /// The secret extracted from the parsed site secret key was invalid. + /// + /// Used by Cloudflare Turnstile + InvalidParsedSecret, + + /// The request is invalid or malformed. + /// + /// Used by Cloudflare Turnstile, hCaptcha, reCAPTCHA + BadRequest, + + /// The remoteip parameter is missing. + /// + /// Used by hCaptcha + MissingRemoteip, + + /// The remoteip parameter is not a valid IP address or blinded value. + /// + /// Used by hCaptcha + InvalidRemoteip, + + /// The response parameter has already been checked, or has another issue. + /// + /// Used by hCaptcha + InvalidOrAlreadySeenResponse, + + /// You have used a testing sitekey but have not used its matching secret. + /// + /// Used by hCaptcha + NotUsingDummyPasscode, + + /// The sitekey is not registered with the provided secret. + /// + /// Used by hCaptcha + SitekeySecretMismatch, + + /// The response is no longer valid: either is too old or has been used + /// previously. + /// + /// Used by Cloudflare Turnstile, reCAPTCHA + TimeoutOrDuplicate, + + /// An internal error happened while validating the response.
The request + /// can be retried. + /// + /// Used by Cloudflare Turnstile + InternalError, +} + +impl Form { + #[tracing::instrument( + skip_all, + name = "captcha.verify", + fields(captcha.hostname, captcha.challenge_ts, captcha.service), + )] + pub async fn verify( + &self, + activity_tracker: &BoundActivityTracker, + http_client: &reqwest::Client, + site_hostname: &str, + config: Option<&CaptchaConfig>, + ) -> Result<(), Error> { + let Some(config) = config else { + if self.g_recaptcha_response.is_some() + || self.h_captcha_response.is_some() + || self.cf_turnstile_response.is_some() + { + return Err(Error::NoCaptchaConfigured); + } + + return Ok(()); + }; + + let remoteip = activity_tracker.ip(); + let secret = &config.secret_key; + + let span = tracing::Span::current(); + span.record("captcha.service", tracing::field::debug(config.service)); + + let request = match ( + config.service, + &self.g_recaptcha_response, + &self.h_captcha_response, + &self.cf_turnstile_response, + ) { + (_, None, None, None) => return Err(Error::MissingCaptchaResponse), + + // reCAPTCHA v2 + (CaptchaService::RecaptchaV2, Some(response), None, None) => http_client + .post(RECAPTCHA_VERIFY_URL) + .form(&VerificationRequest { + secret, + response, + remoteip, + }), + + // hCaptcha + (CaptchaService::HCaptcha, None, Some(response), None) => http_client + .post(HCAPTCHA_VERIFY_URL) + .form(&VerificationRequest { + secret, + response, + remoteip, + }), + + // Cloudflare Turnstile + (CaptchaService::CloudflareTurnstile, None, None, Some(response)) => http_client + .post(CF_TURNSTILE_VERIFY_URL) + .form(&VerificationRequest { + secret, + response, + remoteip, + }), + + _ => return Err(Error::CaptchaResponseMismatch), + }; + + let response: VerificationResponse = request + .send_traced() + .await? + .error_for_status()? 
+ .json() + .await?; + + if !response.success { + return Err(Error::InvalidCaptcha( + response.error_codes.unwrap_or_default(), + )); + } + + // If the response is successful, we should have both the hostname and the + // challenge_ts + let Some(hostname) = response.hostname else { + return Err(Error::InvalidResponse); + }; + + let Some(challenge_ts) = response.challenge_ts else { + return Err(Error::InvalidResponse); + }; + + span.record("captcha.hostname", &hostname); + span.record("captcha.challenge_ts", &challenge_ts); + + if hostname != site_hostname { + return Err(Error::HostnameMismatch { + expected: site_hostname.to_owned(), + got: hostname, + }); + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/cleanup_tests.rs b/matrix-authentication-service/crates/handlers/src/cleanup_tests.rs new file mode 100644 index 00000000..7b1a74f5 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/cleanup_tests.rs @@ -0,0 +1,774 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Integration tests for session cleanup jobs. +//! +//! These tests verify that cleanup jobs correctly respect the session hierarchy +//! required for OIDC Backchannel Logout to function properly. +//! +//! Session hierarchy: +//! ```text +//! upstream_oauth_authorization_sessions (matched by sub/sid claims) +//! │ user_session_id +//! ▼ +//! user_sessions (browser sessions) +//! │ user_session_id FK +//! ┌────┴──────────────┐ +//! │ │ +//! ▼ ▼ +//! compat_sessions oauth2_sessions +//! 
``` + +use chrono::Duration; +use hyper::{Request, StatusCode}; +use mas_data_model::{ + BrowserSession, Clock as _, CompatSession, Device, UpstreamOAuthAuthorizationSession, + UpstreamOAuthLink, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, + UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderTokenAuthMethod, User, +}; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_jose::jwt::{JsonWebSignatureHeader, Jwt}; +use mas_storage::{ + RepositoryAccess, + queue::{DeactivateUserJob, QueueJobRepositoryExt}, + upstream_oauth2::UpstreamOAuthProviderParams, +}; +use oauth2_types::scope::{OPENID, Scope}; +use sqlx::PgPool; +use wiremock::{ + Mock, MockServer, ResponseTemplate, + matchers::{method, path}, +}; + +use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + +/// Helper struct to hold all the entities created for testing the session +/// hierarchy. +struct TestSessionHierarchy { + user: User, + browser_session: BrowserSession, + compat_session: Option, + oauth2_session: Option, + upstream_session: Option, + #[expect(dead_code)] + upstream_link: Option, + provider: Option, + #[expect(dead_code)] + mock_server: MockServer, +} + +const UPSTREAM_OAUTH_ISSUER: &str = "https://idp.example.com"; +const UPSTREAM_OAUTH_CLIENT_ID: &str = "test-client"; +const UPSTREAM_OAUTH_SESSION_ID: &str = "upstream-oauth-session-id"; +const UPSTREAM_OAUTH_SUBJECT: &str = "upstream-oauth-sub"; + +/// Create the complete session hierarchy for testing. 
+/// +/// This creates: +/// - A user +/// - A browser session +/// - Optionally a compat session linked to the browser session +/// - Optionally an OAuth 2.0 session linked to the browser session +/// - Optionally an upstream OAuth session linked to the browser session +async fn create_session_hierarchy( + state: &TestState, + with_compat: bool, + with_oauth2: bool, + with_upstream: bool, +) -> TestSessionHierarchy { + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + // Start a mock server to answer to JWKS requests for the backchannel logout + // tests + let mock_server = MockServer::start().await; + + let jwks = state.key_store.public_jwks(); + + let mock_jwks = Mock::given(method("GET")) + .and(path("jwks.json")) + .respond_with(ResponseTemplate::new(200).set_body_json(jwks)); + mock_server.register(mock_jwks).await; + + // Create user + let user = repo + .user() + .add(&mut rng, &state.clock, "testuser".to_owned()) + .await + .unwrap(); + + // Create browser session + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + + // Create compat session if requested + let compat_session = if with_compat { + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add( + &mut rng, + &state.clock, + &user, + device, + Some(&browser_session), + false, + None, + ) + .await + .unwrap(); + Some(session) + } else { + None + }; + + // Create OAuth2 session if requested + let oauth2_session = if with_oauth2 { + // First create an OAuth2 client + let client = repo + .oauth2_client() + .add( + &mut rng, + &state.clock, + vec!["https://example.com/callback".parse().unwrap()], + None, + None, + None, + vec![], + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ) + .await + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut rng, + &state.clock, + &client, + 
&browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + Some(session) + } else { + None + }; + + // Create upstream OAuth session if requested + let (provider, upstream_link, upstream_session) = if with_upstream { + let params = UpstreamOAuthProviderParams { + issuer: Some(UPSTREAM_OAUTH_ISSUER.to_owned()), + human_name: Some("Test IdP".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::ClientSecretBasic, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: UPSTREAM_OAUTH_CLIENT_ID.to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + // Point to the mock server to have it use a JWKS we can use for signing + jwks_uri_override: Some(format!("{}/jwks.json", mock_server.uri()).parse().unwrap()), + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Disabled, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: vec![], + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::LogoutAll, + }; + + let provider = repo + .upstream_oauth_provider() + .add(&mut rng, &state.clock, params) + .await + .unwrap(); + + // Create a link + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + UPSTREAM_OAUTH_SUBJECT.to_owned(), + Some("test@idp.example.com".to_owned()), + ) + .await + .unwrap(); + + // Associate link to user + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await + .unwrap(); + + // Create an upstream session + let session = repo + .upstream_oauth_session() + .add( + &mut rng, + &state.clock, + &provider, + 
"state123".to_owned(), + Some("verifier123".to_owned()), + Some("nonce123".to_owned()), + ) + .await + .unwrap(); + + // Complete the session with the link and ID token claims (including sub and + // sid) + let id_token_claims = serde_json::json!({ + "sub": UPSTREAM_OAUTH_SUBJECT, + "sid": UPSTREAM_OAUTH_SESSION_ID, + }); + let session = repo + .upstream_oauth_session() + .complete_with_link( + &state.clock, + session, + &link, + Some("fake-id-token".to_owned()), + Some(id_token_claims), + None, + None, + ) + .await + .unwrap(); + + // Consume the session and link it to the browser session + let session = repo + .upstream_oauth_session() + .consume(&state.clock, session, &browser_session) + .await + .unwrap(); + + (Some(provider), Some(link), Some(session)) + } else { + (None, None, None) + }; + + repo.save().await.unwrap(); + + TestSessionHierarchy { + user, + browser_session, + compat_session, + oauth2_session, + upstream_session, + upstream_link, + provider, + mock_server, + } +} + +/// Test that sessions finished less than 30 days ago are NOT deleted. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_cleanup_sessions_within_retention_preserved(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Create a full hierarchy with all session types + let hierarchy = create_session_hierarchy(&state, true, true, false).await; + + // Finish all sessions + let mut repo = state.repository().await.unwrap(); + let browser_session = repo + .browser_session() + .finish(&state.clock, hierarchy.browser_session) + .await + .unwrap(); + let compat_session = repo + .compat_session() + .finish(&state.clock, hierarchy.compat_session.unwrap()) + .await + .unwrap(); + let oauth2_session = repo + .oauth2_session() + .finish(&state.clock, hierarchy.oauth2_session.unwrap()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Wait one day and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(1).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // Verify all sessions still exist + let mut repo = state.repository().await.unwrap(); + assert!( + repo.browser_session() + .lookup(browser_session.id) + .await + .unwrap() + .is_some(), + "Browser session should still exist" + ); + assert!( + repo.compat_session() + .lookup(compat_session.id) + .await + .unwrap() + .is_some(), + "Compat session should still exist" + ); + assert!( + repo.oauth2_session() + .lookup(oauth2_session.id) + .await + .unwrap() + .is_some(), + "OAuth2 session should still exist" + ); +} + +/// Test that deactivated users eventually get all their sessions cleaned up +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_cleanup_deactivated_users(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let mut rng = state.rng(); + + // Create a hierarchy with all session types + let hierarchy = create_session_hierarchy(&state, true, 
true, true).await; + + // Deactivate the user + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .lookup(hierarchy.user.id) + .await + .unwrap() + .unwrap(); + let user = repo.user().deactivate(&state.clock, user).await.unwrap(); + repo.queue_job() + .schedule_job(&mut rng, &state.clock, DeactivateUserJob::new(&user, false)) + .await + .unwrap(); + repo.save().await.unwrap(); + + state.run_jobs_in_queue().await; + + // Verify all sessions are finished + let mut repo = state.repository().await.unwrap(); + assert!( + repo.compat_session() + .lookup(hierarchy.compat_session.as_ref().unwrap().id) + .await + .unwrap() + .unwrap() + .is_finished(), + "Compat session should be finished" + ); + assert!( + repo.oauth2_session() + .lookup(hierarchy.oauth2_session.as_ref().unwrap().id) + .await + .unwrap() + .unwrap() + .is_finished(), + "OAuth2 session should be finished" + ); + assert!( + repo.browser_session() + .lookup(hierarchy.browser_session.id) + .await + .unwrap() + .unwrap() + .finished_at + .is_some(), + "Browser session should be there" + ); + assert!( + repo.upstream_oauth_session() + .lookup(hierarchy.upstream_session.as_ref().unwrap().id) + .await + .unwrap() + .is_some(), + "Upstream OAuth session should be there" + ); + + // Wait 31 days and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // Verify all sessions are deleted + let mut repo = state.repository().await.unwrap(); + assert!( + repo.compat_session() + .lookup(hierarchy.compat_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "Compat session should be deleted" + ); + assert!( + repo.oauth2_session() + .lookup(hierarchy.oauth2_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "OAuth2 session should be deleted" + ); + assert!( + 
repo.browser_session() + .lookup(hierarchy.browser_session.id) + .await + .unwrap() + .is_none(), + "Browser session should be deleted" + ); + assert!( + repo.upstream_oauth_session() + .lookup(hierarchy.upstream_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "Upstream OAuth session should be deleted" + ); +} + +/// Test that sessions finished more than 30 days ago ARE deleted. +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_cleanup_sessions_after_retention_deleted(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Create hierarchy with compat and oauth2 sessions + let hierarchy = create_session_hierarchy(&state, true, true, false).await; + + // Finish all sessions + let mut repo = state.repository().await.unwrap(); + let browser_session = repo + .browser_session() + .finish(&state.clock, hierarchy.browser_session) + .await + .unwrap(); + let compat_session = repo + .compat_session() + .finish(&state.clock, hierarchy.compat_session.unwrap()) + .await + .unwrap(); + let oauth2_session = repo + .oauth2_session() + .finish(&state.clock, hierarchy.oauth2_session.unwrap()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Wait 31 days and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // Verify all sessions are deleted + let mut repo = state.repository().await.unwrap(); + assert!( + repo.compat_session() + .lookup(compat_session.id) + .await + .unwrap() + .is_none(), + "Compat session should be deleted" + ); + assert!( + repo.oauth2_session() + .lookup(oauth2_session.id) + .await + .unwrap() + .is_none(), + "OAuth2 session should be deleted" + ); + // Browser session should also be deleted since children are gone + assert!( + repo.browser_session() + .lookup(browser_session.id) + .await + 
.unwrap() + .is_none(), + "Browser session should be deleted after children are gone" + ); +} + +/// Test that user sessions with remaining child sessions are NOT deleted. +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_cleanup_user_session_blocked_by_child_sessions(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Create hierarchy with compat session only + let hierarchy = create_session_hierarchy(&state, true, false, false).await; + + // Finish only the browser session (not the compat session) + let mut repo = state.repository().await.unwrap(); + let browser_session = repo + .browser_session() + .finish(&state.clock, hierarchy.browser_session) + .await + .unwrap(); + repo.save().await.unwrap(); + + let compat_session_id = hierarchy.compat_session.as_ref().unwrap().id; + + // Wait 31 days and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // Verify browser session still exists because compat session is still active + let mut repo = state.repository().await.unwrap(); + assert!( + repo.browser_session() + .lookup(browser_session.id) + .await + .unwrap() + .is_some(), + "Browser session should NOT be deleted because it has an active child session" + ); + assert!( + repo.compat_session() + .lookup(compat_session_id) + .await + .unwrap() + .is_some(), + "Compat session should still exist (not finished)" + ); +} + +/// Test that backchannel logout can find sessions before cleanup. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_backchannel_logout_works_before_cleanup(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Create hierarchy with upstream session + let hierarchy = create_session_hierarchy(&state, true, true, true).await; + + let provider = hierarchy.provider.as_ref().unwrap(); + + // The edge case we're trying to make works, is that if the browser session + // is finished for 30 days but *not* the child sessions, that browser + // session and the upstream sessions stay there so that backchannel logout + // still works + let mut repo = state.repository().await.unwrap(); + let browser_session = repo + .browser_session() + .lookup(hierarchy.browser_session.id) + .await + .unwrap() + .unwrap(); + repo.browser_session() + .finish(&state.clock, browser_session) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Now wait 31 days and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // Now let's craft a backchannel logout request + let ts = state.clock.now().timestamp(); + let payload = serde_json::json!({ + "iss": UPSTREAM_OAUTH_ISSUER, + "aud": UPSTREAM_OAUTH_CLIENT_ID, + "sub": UPSTREAM_OAUTH_SUBJECT, + "sid": UPSTREAM_OAUTH_SESSION_ID, + "jti": "iswearthisisrandom", + "iat": ts, + "exp": ts + 300, + "events": { + "http://schemas.openid.net/event/backchannel-logout": {} + } + }); + + let key = state + .key_store + .signing_key_for_algorithm(&JsonWebSignatureAlg::Rs256) + .unwrap(); + let signer = key + .params() + .signing_key_for_alg(&JsonWebSignatureAlg::Rs256) + .unwrap(); + let signed = Jwt::sign( + JsonWebSignatureHeader::new(JsonWebSignatureAlg::Rs256), + payload, + &signer, + ) + .unwrap(); + + let request = Request::post(format!("/upstream/backchannel-logout/{}", 
provider.id)).form( + serde_json::json!({ + "logout_token": signed.as_str(), + }), + ); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + // The session should still exist, but are finished + let mut repo = state.repository().await.unwrap(); + assert!( + !repo + .browser_session() + .lookup(hierarchy.browser_session.id) + .await + .unwrap() + .unwrap() + .active(), + "Inactive browser session should not be cleaned up" + ); + assert!( + repo.compat_session() + .lookup(hierarchy.compat_session.as_ref().unwrap().id) + .await + .unwrap() + .unwrap() + .is_finished(), + "Active compat session should not be cleaned up" + ); + assert!( + repo.oauth2_session() + .lookup(hierarchy.oauth2_session.as_ref().unwrap().id) + .await + .unwrap() + .unwrap() + .is_finished(), + "Active OAuth2 session should not be cleaned up" + ); + + // Wait again, then the sessions should be completely deleted + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + let mut repo = state.repository().await.unwrap(); + assert!( + repo.browser_session() + .lookup(hierarchy.browser_session.id) + .await + .unwrap() + .is_none(), + "Browser session should be deleted" + ); + assert!( + repo.compat_session() + .lookup(hierarchy.compat_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "Compat session should be deleted" + ); + assert!( + repo.oauth2_session() + .lookup(hierarchy.oauth2_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "OAuth2 session should be deleted" + ); + assert!( + repo.upstream_oauth_session() + .lookup(hierarchy.upstream_session.as_ref().unwrap().id) + .await + .unwrap() + .is_none(), + "Upstream OAuth session should be deleted" + ); +} + +/// Test that active sessions are not cleaned up even after retention period. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_active_sessions_not_cleaned_up(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Create hierarchy without finishing any sessions + let hierarchy = create_session_hierarchy(&state, true, true, false).await; + + let browser_session_id = hierarchy.browser_session.id; + let compat_session_id = hierarchy.compat_session.as_ref().unwrap().id; + let oauth2_session_id = hierarchy.oauth2_session.as_ref().unwrap().id; + + // Wait 31 days and run the cleanup jobs a few times + state.clock.advance(Duration::try_days(31).unwrap()); + state.run_jobs_in_queue().await; + for _ in 0..5 { + state.clock.advance(Duration::try_hours(1).unwrap()); + state.run_jobs_in_queue().await; + } + + // All sessions should still exist because they're active + let mut repo = state.repository().await.unwrap(); + assert!( + repo.browser_session() + .lookup(browser_session_id) + .await + .unwrap() + .is_some(), + "Active browser session should not be cleaned up" + ); + assert!( + repo.compat_session() + .lookup(compat_session_id) + .await + .unwrap() + .is_some(), + "Active compat session should not be cleaned up" + ); + assert!( + repo.oauth2_session() + .lookup(oauth2_session_id) + .await + .unwrap() + .is_some(), + "Active OAuth2 session should not be cleaned up" + ); +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/login.rs b/matrix-authentication-service/crates/handlers/src/compat/login.rs new file mode 100644 index 00000000..ebb5d32c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/login.rs @@ -0,0 +1,1515 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::{Arc, LazyLock}; + +use axum::{Json, extract::State, response::IntoResponse}; +use axum_extra::typed_header::TypedHeader; +use chrono::Duration; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{ + BoxClock, BoxRng, Clock, CompatSession, CompatSsoLoginState, Device, SiteConfig, TokenType, + User, +}; +use mas_matrix::HomeserverConnection; +use mas_policy::{Policy, Requester, ViolationCode, model::CompatLogin}; +use mas_storage::{ + BoxRepository, BoxRepositoryFactory, RepositoryAccess, + compat::{ + CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository, + CompatSsoLoginRepository, + }, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, + user::{UserPasswordRepository, UserRepository}, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use rand::{CryptoRng, RngCore}; +use serde::{Deserialize, Serialize}; +use serde_with::{DurationMilliSeconds, serde_as, skip_serializing_none}; +use thiserror::Error; +use zeroize::Zeroizing; + +use super::{MatrixError, MatrixJsonBody}; +use crate::{ + BoundActivityTracker, Limiter, METER, RequesterFingerprint, impl_from_error_for_route, + passwords::{PasswordManager, PasswordVerificationResult}, + rate_limit::PasswordCheckLimitedError, + session::count_user_sessions_for_limiting, +}; + +static LOGIN_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.compat.login_request") + .with_description("How many compatibility login requests have happened") + .with_unit("{request}") + .build() +}); +const TYPE: Key = Key::from_static_str("type"); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Debug, Serialize)] +#[serde(tag = "type")] +enum LoginType { + #[serde(rename = "m.login.password")] + Password, + + // we will leave MSC3824 `actions` as undefined for this auth type as unclear + // how it should be interpreted + #[serde(rename = "m.login.token")] + Token, + + #[serde(rename = "m.login.sso")] + Sso { + 
#[serde(skip_serializing_if = "Vec::is_empty")] + identity_providers: Vec, + oauth_aware_preferred: bool, + /// DEPRECATED: Use `oauth_aware_preferred` instead. We will remove this + /// once enough clients support the stable name `oauth_aware_preferred`. + #[serde(rename = "org.matrix.msc3824.delegated_oidc_compatibility")] + unstable_delegated_oidc_compatibility: bool, + }, +} + +#[derive(Debug, Serialize)] +struct SsoIdentityProvider { + id: &'static str, + name: &'static str, +} + +#[derive(Debug, Serialize)] +struct LoginTypes { + flows: Vec, +} + +#[tracing::instrument(name = "handlers.compat.login.get", skip_all)] +pub(crate) async fn get(State(password_manager): State) -> impl IntoResponse { + let flows = if password_manager.is_enabled() { + vec![ + LoginType::Password, + LoginType::Sso { + identity_providers: vec![], + oauth_aware_preferred: true, + unstable_delegated_oidc_compatibility: true, + }, + LoginType::Token, + ] + } else { + vec![ + LoginType::Sso { + identity_providers: vec![], + oauth_aware_preferred: true, + unstable_delegated_oidc_compatibility: true, + }, + LoginType::Token, + ] + }; + + let res = LoginTypes { flows }; + + Json(res) +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct RequestBody { + #[serde(flatten)] + credentials: Credentials, + + #[serde(default)] + refresh_token: bool, + + /// ID of the client device. + /// If this does not correspond to a known client device, a new device will + /// be created. The given device ID must not be the same as a + /// cross-signing key ID. The server will auto-generate a `device_id` if + /// this is not specified. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + device_id: Option, + + #[serde(default, skip_serializing_if = "Option::is_none")] + initial_device_display_name: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum Credentials { + #[serde(rename = "m.login.password")] + Password { + identifier: Option, + // This property has been deprecated for a while, but some tools still use it. + user: Option, + password: String, + }, + + #[serde(rename = "m.login.token")] + Token { token: String }, + + #[serde(other)] + Unsupported, +} + +impl Credentials { + fn login_type(&self) -> &'static str { + match self { + Self::Password { .. } => "m.login.password", + Self::Token { .. } => "m.login.token", + Self::Unsupported => "unsupported", + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum Identifier { + #[serde(rename = "m.id.user")] + User { user: String }, + + #[serde(other)] + Unsupported, +} + +#[skip_serializing_none] +#[serde_as] +#[derive(Debug, Serialize, Deserialize)] +pub struct ResponseBody { + access_token: String, + device_id: Option, + user_id: String, + refresh_token: Option, + #[serde_as(as = "Option>")] + expires_in_ms: Option, +} + +#[derive(Debug, Error)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("unsupported login method")] + Unsupported, + + #[error("unsupported identifier type")] + UnsupportedIdentifier, + + #[error("missing property 'identifier'")] + MissingIdentifier, + + #[error("user not found")] + UserNotFound, + + #[error("user has no password")] + NoPassword, + + #[error("password verification failed")] + PasswordMismatch, + + #[error("request rate limited")] + RateLimited(#[from] PasswordCheckLimitedError), + + #[error("login took too long")] + LoginTookTooLong, + + #[error("invalid login token")] + InvalidLoginToken, + + #[error("user is locked")] + UserLocked, + + #[error("failed to provision device")] + 
ProvisionDeviceFailed(#[source] anyhow::Error),
+
+ #[error("login rejected by policy")]
+ PolicyRejected,
+
+ #[error("login rejected by policy (hard session limit reached)")]
+ PolicyHardSessionLimitReached,
+}
+
+impl_from_error_for_route!(mas_storage::RepositoryError);
+impl_from_error_for_route!(mas_policy::EvaluationError);
+
+impl From for RouteError {
+ fn from(err: anyhow::Error) -> Self {
+ Self::Internal(err.into())
+ }
+}
+
+impl IntoResponse for RouteError {
+ fn into_response(self) -> axum::response::Response {
+ let sentry_event_id =
+ record_error!(self, Self::Internal(_) | Self::ProvisionDeviceFailed(_));
+ LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]);
+ let response = match self {
+ Self::Internal(_) | Self::ProvisionDeviceFailed(_) => MatrixError {
+ errcode: "M_UNKNOWN",
+ error: "Internal server error",
+ status: StatusCode::INTERNAL_SERVER_ERROR,
+ },
+ Self::RateLimited(_) => MatrixError {
+ errcode: "M_LIMIT_EXCEEDED",
+ error: "Too many login attempts",
+ status: StatusCode::TOO_MANY_REQUESTS,
+ },
+ Self::Unsupported => MatrixError {
+ errcode: "M_UNKNOWN",
+ error: "Invalid login type",
+ status: StatusCode::BAD_REQUEST,
+ },
+ Self::UnsupportedIdentifier => MatrixError {
+ errcode: "M_UNKNOWN",
+ error: "Unsupported login identifier",
+ status: StatusCode::BAD_REQUEST,
+ },
+ Self::MissingIdentifier => MatrixError {
+ errcode: "M_BAD_JSON",
+ error: "Missing property 'identifier'",
+ status: StatusCode::BAD_REQUEST,
+ },
+ Self::UserNotFound | Self::NoPassword | Self::PasswordMismatch => MatrixError {
+ errcode: "M_FORBIDDEN",
+ error: "Invalid username/password",
+ status: StatusCode::FORBIDDEN,
+ },
+ Self::LoginTookTooLong => MatrixError {
+ errcode: "M_FORBIDDEN",
+ error: "Login token expired",
+ status: StatusCode::FORBIDDEN,
+ },
+ Self::InvalidLoginToken => MatrixError {
+ errcode: "M_FORBIDDEN",
+ error: "Invalid login token",
+ status: StatusCode::FORBIDDEN,
+ },
+ Self::UserLocked => MatrixError {
+ errcode: 
"M_USER_LOCKED",
+ error: "User account has been locked",
+ status: StatusCode::UNAUTHORIZED,
+ },
+ Self::PolicyRejected => MatrixError {
+ errcode: "M_FORBIDDEN",
+ error: "Login denied by the policy enforced by this service",
+ status: StatusCode::FORBIDDEN,
+ },
+ Self::PolicyHardSessionLimitReached => MatrixError {
+ errcode: "M_FORBIDDEN",
+ error: "You have reached your hard device limit. Please visit your account page to sign some out.",
+ status: StatusCode::FORBIDDEN,
+ },
+ };
+
+ (sentry_event_id, response).into_response()
+ }
+}
+
+#[tracing::instrument(name = "handlers.compat.login.post", skip_all)]
+pub(crate) async fn post(
+ mut rng: BoxRng,
+ clock: BoxClock,
+ State(password_manager): State,
+ State(repository_factory): State,
+ activity_tracker: BoundActivityTracker,
+ State(homeserver): State>,
+ State(site_config): State,
+ State(limiter): State,
+ mut policy: Policy,
+ requester: RequesterFingerprint,
+ user_agent: Option>,
+ MatrixJsonBody(input): MatrixJsonBody,
+) -> Result {
+ let user_agent = user_agent.map(|ua| ua.as_str().to_owned());
+ let login_type = input.credentials.login_type();
+ let mut repo = repository_factory.create().await?;
+ let (mut session, user) = match (password_manager.is_enabled(), input.credentials) {
+ (
+ true,
+ Credentials::Password {
+ identifier,
+ user,
+ password,
+ },
+ ) => {
+ // This is to support both the (very) old and deprecated 'user' property, with
+ // the same behavior as Synapse: it takes precedence over the 'identifier' if
+ // provided
+ let user = match (identifier, user) {
+ (Some(Identifier::User { user }), None) | (_, Some(user)) => user,
+ (Some(Identifier::Unsupported), None) => {
+ return Err(RouteError::UnsupportedIdentifier);
+ }
+ (None, None) => {
+ return Err(RouteError::MissingIdentifier);
+ }
+ };
+
+ // Try getting the localpart out of the MXID
+ let username = homeserver.localpart(&user).unwrap_or(&user);
+
+ user_password_login(
+ &mut rng,
+ &clock,
+ &password_manager,
+ 
&limiter, + requester, + &mut repo, + &mut policy, + Requester { + ip_address: activity_tracker.ip(), + user_agent: user_agent.clone(), + }, + username, + password, + input.device_id, // TODO check for validity + input.initial_device_display_name, + ) + .await? + } + + (_, Credentials::Token { token }) => { + token_login( + &mut rng, + &clock, + &mut repo, + &mut policy, + Requester { + ip_address: activity_tracker.ip(), + user_agent: user_agent.clone(), + }, + &token, + input.device_id, + input.initial_device_display_name, + ) + .await? + } + + _ => { + return Err(RouteError::Unsupported); + } + }; + + if let Some(user_agent) = user_agent { + session = repo + .compat_session() + .record_user_agent(session, user_agent) + .await?; + } + + let user_id = homeserver.mxid(&user.username); + + // If the client asked for a refreshable token, make it expire + let expires_in = if input.refresh_token { + Some(site_config.compat_token_ttl) + } else { + None + }; + + let access_token = TokenType::CompatAccessToken.generate(&mut rng); + let access_token = repo + .compat_access_token() + .add(&mut rng, &clock, &session, access_token, expires_in) + .await?; + + let refresh_token = if input.refresh_token { + let refresh_token = TokenType::CompatRefreshToken.generate(&mut rng); + let refresh_token = repo + .compat_refresh_token() + .add(&mut rng, &clock, &session, &access_token, refresh_token) + .await?; + Some(refresh_token.token) + } else { + None + }; + + // Ideally, we'd keep the lock whilst we actually create the device, but we + // really want to stop holding the transaction while we talk to the + // homeserver. 
+ //
+ // In practice, this is fine, because:
+ // - the session exists after we committed the transaction, so a sync job won't
+ // try to delete it
+ // - we've acquired a lock on the user before creating the session, meaning
+ // we've made sure that sync jobs finished before we create the new session
+ // - we're in the read-committed isolation level, which means the sync will see
+ // what we've committed and won't try to delete the session once we release
+ // the lock
+ repo.save().await?;
+
+ activity_tracker
+ .record_compat_session(&clock, &session)
+ .await;
+
+ // This session will have for sure the device on it, both methods create a
+ // device
+ let Some(device) = &session.device else {
+ unreachable!()
+ };
+
+ // Now we can create the device on the homeserver, without holding the
+ // transaction
+ if let Err(err) = homeserver
+ .upsert_device(
+ &user.username,
+ device.as_str(),
+ session.human_name.as_deref(),
+ )
+ .await
+ {
+ // Something went wrong, let's end this session and schedule a device sync
+ let mut repo = repository_factory.create().await?;
+ let session = repo.compat_session().finish(&clock, session).await?;
+
+ repo.queue_job()
+ .schedule_job(
+ &mut rng,
+ &clock,
+ SyncDevicesJob::new_for_id(session.user_id),
+ )
+ .await?;
+
+ repo.save().await?;
+
+ return Err(RouteError::ProvisionDeviceFailed(err));
+ }
+
+ LOGIN_COUNTER.add(
+ 1,
+ &[
+ KeyValue::new(TYPE, login_type),
+ KeyValue::new(RESULT, "success"),
+ ],
+ );
+
+ Ok(Json(ResponseBody {
+ access_token: access_token.token,
+ device_id: session.device,
+ user_id,
+ refresh_token,
+ expires_in_ms: expires_in,
+ }))
+}
+
+async fn token_login(
+ rng: &mut (dyn RngCore + Send),
+ clock: &dyn Clock,
+ repo: &mut BoxRepository,
+ policy: &mut Policy,
+ requester: Requester,
+ token: &str,
+ requested_device_id: Option,
+ initial_device_display_name: Option,
+) -> Result<(CompatSession, User), RouteError> {
+ let login = repo
+ .compat_sso_login()
+ .find_by_token(token)
+ 
.await? + .ok_or(RouteError::InvalidLoginToken)?; + + let now = clock.now(); + let browser_session_id = match login.state { + CompatSsoLoginState::Pending => { + tracing::error!( + compat_sso_login.id = %login.id, + "Exchanged a token for a login that was not fullfilled yet" + ); + return Err(RouteError::InvalidLoginToken); + } + CompatSsoLoginState::Fulfilled { + fulfilled_at, + browser_session_id, + .. + } => { + if now > fulfilled_at + Duration::microseconds(30 * 1000 * 1000) { + return Err(RouteError::LoginTookTooLong); + } + + browser_session_id + } + CompatSsoLoginState::Exchanged { + exchanged_at, + compat_session_id, + .. + } => { + if now > exchanged_at + Duration::microseconds(30 * 1000 * 1000) { + // TODO: log that session out + tracing::error!( + compat_sso_login.id = %login.id, + compat_session.id = %compat_session_id, + "Login token exchanged a second time more than 30s after" + ); + } + + return Err(RouteError::InvalidLoginToken); + } + }; + + let Some(browser_session) = repo.browser_session().lookup(browser_session_id).await? 
else { + tracing::error!( + compat_sso_login.id = %login.id, + browser_session.id = %browser_session_id, + "Attempt to exchange login token but no associated browser session found" + ); + return Err(RouteError::InvalidLoginToken); + }; + if !browser_session.active() || !browser_session.user.is_valid() { + tracing::info!( + compat_sso_login.id = %login.id, + browser_session.id = %browser_session_id, + "Attempt to exchange login token but browser session is not active" + ); + return Err( + if browser_session.finished_at.is_some() + || browser_session.user.deactivated_at.is_some() + { + RouteError::InvalidLoginToken + } else { + RouteError::UserLocked + }, + ); + } + + // We're about to create a device, let's explicitly acquire a lock, so that + // any concurrent sync will read after we've committed + repo.user() + .acquire_lock_for_sync(&browser_session.user) + .await?; + + let device = if let Some(requested_device_id) = requested_device_id { + Device::from(requested_device_id) + } else { + Device::generate(rng) + }; + + let session_replaced = repo + .app_session() + .finish_sessions_to_replace_device(clock, &browser_session.user, &device) + .await?; + + let session_counts = count_user_sessions_for_limiting(repo, &browser_session.user).await?; + + let res = policy + .evaluate_compat_login(mas_policy::CompatLoginInput { + user: &browser_session.user, + login: CompatLogin::Token, + session_replaced, + session_counts, + requester, + }) + .await?; + if !res.valid() { + // If the only violation is that we have too many sessions, then handle that + // separately. + // In the future, we intend to evict some sessions automatically instead. We + // don't trigger this if there was some other violation anyway, since that means + // that removing a session wouldn't actually unblock the login. 
+ if res.violations.len() == 1 { + let violation = &res.violations[0]; + if violation.code == Some(ViolationCode::TooManySessions) { + // The only violation is having reached the session limit. + return Err(RouteError::PolicyHardSessionLimitReached); + } + } + return Err(RouteError::PolicyRejected); + } + + // We first create the session in the database, commit the transaction, then + // create it on the homeserver, scheduling a device sync job afterwards to + // make sure we don't end up in an inconsistent state. + let compat_session = repo + .compat_session() + .add( + rng, + clock, + &browser_session.user, + device, + Some(&browser_session), + false, + initial_device_display_name, + ) + .await?; + + repo.compat_sso_login() + .exchange(clock, login, &compat_session) + .await?; + + Ok((compat_session, browser_session.user)) +} + +async fn user_password_login( + mut rng: &mut (impl RngCore + CryptoRng + Send), + clock: &impl Clock, + password_manager: &PasswordManager, + limiter: &Limiter, + requester: RequesterFingerprint, + repo: &mut BoxRepository, + policy: &mut Policy, + policy_requester: Requester, + username: &str, + password: String, + requested_device_id: Option, + initial_device_display_name: Option, +) -> Result<(CompatSession, User), RouteError> { + // Find the user + let user = repo + .user() + .find_by_username(username) + .await? + .filter(|user| user.deactivated_at.is_none()) + .ok_or(RouteError::UserNotFound)?; + + if user.locked_at.is_some() { + return Err(RouteError::UserLocked); + } + + // Check the rate limit + limiter.check_password(requester, &user)?; + + // Lookup its password + let user_password = repo + .user_password() + .active(&user) + .await? + .ok_or(RouteError::NoPassword)?; + + // Verify the password + let password = Zeroizing::new(password); + + match password_manager + .verify_and_upgrade( + &mut rng, + user_password.version, + password, + user_password.hashed_password.clone(), + ) + .await? 
+ { + PasswordVerificationResult::Success(Some((version, hashed_password))) => { + // Save the upgraded password if needed + repo.user_password() + .add( + &mut rng, + clock, + &user, + version, + hashed_password, + Some(&user_password), + ) + .await?; + } + PasswordVerificationResult::Success(None) => {} + PasswordVerificationResult::Failure => { + return Err(RouteError::PasswordMismatch); + } + } + + // We're about to create a device, let's explicitly acquire a lock, so that + // any concurrent sync will read after we've committed + repo.user().acquire_lock_for_sync(&user).await?; + + // Now that the user credentials have been verified, start a new compat session + let device = if let Some(requested_device_id) = requested_device_id { + Device::from(requested_device_id) + } else { + Device::generate(&mut rng) + }; + + let session_replaced = repo + .app_session() + .finish_sessions_to_replace_device(clock, &user, &device) + .await?; + + let session_counts = count_user_sessions_for_limiting(repo, &user).await?; + + let res = policy + .evaluate_compat_login(mas_policy::CompatLoginInput { + user: &user, + login: CompatLogin::Password, + session_replaced, + session_counts, + requester: policy_requester, + }) + .await?; + if !res.valid() { + // If the only violation is that we have too many sessions, then handle that + // separately. + // In the future, we intend to evict some sessions automatically instead. We + // don't trigger this if there was some other violation anyway, since that means + // that removing a session wouldn't actually unblock the login. + if res.violations.len() == 1 { + let violation = &res.violations[0]; + if violation.code == Some(ViolationCode::TooManySessions) { + // The only violation is having reached the session limit. 
+ return Err(RouteError::PolicyHardSessionLimitReached); + } + } + return Err(RouteError::PolicyRejected); + } + + let session = repo + .compat_session() + .add( + &mut rng, + clock, + &user, + device, + None, + false, + initial_device_display_name, + ) + .await?; + + Ok((session, user)) +} + +#[cfg(test)] +mod tests { + use hyper::Request; + use mas_matrix::{HomeserverConnection, ProvisionRequest}; + use rand::distributions::{Alphanumeric, DistString}; + use sqlx::PgPool; + + use super::*; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup, test_site_config}; + + /// Test that the server advertises the right login flows. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_login(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Now let's get the login flows + let request = Request::get("/_matrix/client/v3/login").empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + insta::assert_json_snapshot!(body, @r###" + { + "flows": [ + { + "type": "m.login.password" + }, + { + "type": "m.login.sso", + "oauth_aware_preferred": true, + "org.matrix.msc3824.delegated_oidc_compatibility": true + }, + { + "type": "m.login.token" + } + ] + } + "###); + } + + /// Test the cases where the body is invalid + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_bad_body(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // No/empty body + let request = Request::post("/_matrix/client/v3/login").empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + + insta::assert_json_snapshot!(body, @r#" + { + "errcode": "M_NOT_JSON", + "error": "Body is not a valid JSON document" + } + "#); + + // Missing keys in body + let request = 
Request::post("/_matrix/client/v3/login").json(serde_json::json!({})); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_BAD_JSON", + "error": "JSON fields are not valid" + } + "###); + + // Invalid JSON + let request = Request::post("/_matrix/client/v3/login") + .header("Content-Type", "application/json") + .body("{".to_owned()) + .unwrap(); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_NOT_JSON", + "error": "Body is not a valid JSON document" + } + "###); + } + + /// Test that the server doesn't allow login with a password if the password + /// manager is disabled + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_disabled(pool: PgPool) { + setup(); + let state = TestState::from_pool_with_site_config( + pool, + SiteConfig { + password_login_enabled: false, + ..test_site_config() + }, + ) + .await + .unwrap(); + + // Now let's get the login flows + let request = Request::get("/_matrix/client/v3/login").empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = response.json(); + + insta::assert_json_snapshot!(body, @r###" + { + "flows": [ + { + "type": "m.login.sso", + "oauth_aware_preferred": true, + "org.matrix.msc3824.delegated_oidc_compatibility": true + }, + { + "type": "m.login.token" + } + ] + } + "###); + + // Try to login with a password, it should be rejected + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + + let response = state.request(request).await; + 
response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_UNKNOWN", + "error": "Invalid login type" + } + "###); + } + + async fn user_with_password( + state: &TestState, + username: &str, + password: &str, + locked: bool, + ) -> User { + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut rng, &state.clock, username.to_owned()) + .await + .unwrap(); + let (version, hash) = state + .password_manager + .hash(&mut rng, Zeroizing::new(password.to_owned())) + .await + .unwrap(); + + repo.user_password() + .add(&mut rng, &state.clock, &user, version, hash, None) + .await + .unwrap(); + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + let user = if locked { + repo.user().lock(&state.clock, user).await.unwrap() + } else { + user + }; + + repo.save().await.unwrap(); + user + } + + /// Test that a user can login with a password using the Matrix + /// compatibility API. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_user_password_login(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let user = user_with_password(&state, "alice", "password", true).await; + + // Now let's try to login with the password, without asking for a refresh token. 
+ let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + + // First try to login to a locked account + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::UNAUTHORIZED); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_USER_LOCKED", + "error": "User account has been locked" + } + "###); + + // Now try again after unlocking the account + let mut repo = state.repository().await.unwrap(); + let user = repo.user().unlock(user).await.unwrap(); + repo.save().await.unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "access_token": "mct_cxG6gZXyvelQWW9XqfNbm5KAQovodf_XvJz43", + "device_id": "42oTpLoieH", + "user_id": "@alice:example.com" + } + "###); + + // Do the same, but this time ask for a refresh token. 
+ let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + "refresh_token": true, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "access_token": "mct_PGMLvvMXC4Ds1A3lCWc6Hx4l9DGzqG_lVEIV2", + "device_id": "Yp7FM44zJN", + "user_id": "@alice:example.com", + "refresh_token": "mcr_LoYqtrtBUBcWlE4RX6o47chBCGkadB_9gzpc1", + "expires_in_ms": 300000 + } + "###); + + // Try logging in with the 'user' property + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "user": "alice", + "password": "password", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "access_token": "mct_Xl3bbpfh9yNy9NzuRxyR3b3PLW0rqd_DiXAH2", + "device_id": "6cq7FqNSYo", + "user_id": "@alice:example.com" + } + "###); + + // Reset the state, to reset rate limits + let state = state.reset().await; + + // Try to login with a wrong password. + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "wrongpassword", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid username/password" + } + "###); + + // Try to login with a wrong username. 
+ let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "bob", + }, + "password": "wrongpassword", + })); + + let old_body = body; + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + + // The response should be the same as the previous one, so that we don't leak if + // it's the user that is invalid or the password. + assert_eq!(body, old_body); + + // Try to login to a deactivated account + let mut repo = state.repository().await.unwrap(); + let user = repo.user().deactivate(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid username/password" + } + "###); + + // Should get the same error if the deactivated user is also locked + let mut repo = state.repository().await.unwrap(); + let _user = repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid username/password" + } + "###); + } + + /// Test that we can send a login request without a Content-Type header + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_no_content_type(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + 
user_with_password(&state, "alice", "password", false).await; + // Try without a Content-Type header + let mut request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + request.headers_mut().remove(hyper::header::CONTENT_TYPE); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "access_token": "mct_16tugBE5Ta9LIWoSJaAEHHq2g3fx8S_alcBB4", + "device_id": "ZGpSvYQqlq", + "user_id": "@alice:example.com" + } + "###); + } + + /// Test that a user can login with a password using the Matrix + /// compatibility API, using a MXID as identifier + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_user_password_login_mxid(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let user = user_with_password(&state, "alice", "password", true).await; + + // Login with a full MXID as identifier + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "@alice:example.com", + }, + "password": "password", + })); + + // First try to login to a locked account + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::UNAUTHORIZED); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_USER_LOCKED", + "error": "User account has been locked" + } + "###); + + // Now try again after unlocking the account + let mut repo = state.repository().await.unwrap(); + let _ = repo.user().unlock(user).await.unwrap(); + repo.save().await.unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let body: serde_json::Value = 
response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "access_token": "mct_cxG6gZXyvelQWW9XqfNbm5KAQovodf_XvJz43", + "device_id": "42oTpLoieH", + "user_id": "@alice:example.com" + } + "###); + + // With a MXID, but with the wrong server name + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "@alice:something.corp", + }, + "password": "password", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid username/password" + } + "###); + } + + /// Test that password logins are rate limited. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login_rate_limit(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Let's provision a user without a password. This should be enough to trigger + // the rate limit. + let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now let's try to login with the password, without asking for a refresh token. 
+ let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + + // First three attempts should just tell about the invalid credentials + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::FORBIDDEN); + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::FORBIDDEN); + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::FORBIDDEN); + + // The fourth attempt should be rate limited + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::TOO_MANY_REQUESTS); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_LIMIT_EXCEEDED", + "error": "Too many login attempts" + } + "###); + } + + /// Test the response of an unsupported password identifier. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unsupported_login_identifier(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Try to login with an unsupported login flow. + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.password", + "identifier": { + "type": "m.id.email", + "user": "user@example.com" + }, + "password": "password" + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_UNKNOWN", + "error": "Unsupported login identifier" + } + "###); + } + + /// Test the response of an unsupported login flow. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unsupported_login(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Try to login with an unsupported login flow. + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.unsupported", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_UNKNOWN", + "error": "Invalid login type" + } + "###); + } + + /// Test `m.login.token` login flow. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_login_token_login(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a user + let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + // Start with a locked account + let user = repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + // First try with an invalid token + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.token", + "token": "someinvalidtoken", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid login token" + } + "###); + + let token = get_login_token(&state, &user).await; + + // Try to login with the token. 
+ let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.token", + "token": token, + })); + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::UNAUTHORIZED); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_USER_LOCKED", + "error": "User account has been locked" + } + "###); + + // Now try again after unlocking the account + let mut repo = state.repository().await.unwrap(); + let user = repo.user().unlock(user).await.unwrap(); + repo.save().await.unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r#" + { + "access_token": "mct_bUTa4XIh92RARTPTjqQrCZLAkq2ild_0VsYE6", + "device_id": "uihy4bk51g", + "user_id": "@alice:example.com" + } + "#); + + // Try again with the same token, it should fail. + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.token", + "token": token, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid login token" + } + "###); + + // Try to login, but wait too long before sending the request. + let token = get_login_token(&state, &user).await; + + // Advance the clock to make the token expire. 
+ state + .clock + .advance(Duration::microseconds(60 * 1000 * 1000)); + + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.token", + "token": token, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Login token expired" + } + "###); + + // Try to login to a deactivated account + let token = get_login_token(&state, &user).await; + + let mut repo = state.repository().await.unwrap(); + let user = repo.user().deactivate(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + let request = Request::post("/_matrix/client/v3/login").json(serde_json::json!({ + "type": "m.login.token", + "token": token, + })); + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid login token" + } + "###); + + // Should get the same error if the deactivated user is also locked + let mut repo = state.repository().await.unwrap(); + let _user = repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + let body: serde_json::Value = response.json(); + insta::assert_json_snapshot!(body, @r###" + { + "errcode": "M_FORBIDDEN", + "error": "Invalid login token" + } + "###); + } + + /// Get a login token for a user. + /// Returns the device and the token. + /// + /// # Panics + /// + /// Panics if the repository fails. + async fn get_login_token(state: &TestState, user: &User) -> String { + // XXX: This is a bit manual, but this is what basically the SSO login flow + // does. 
+ let mut repo = state.repository().await.unwrap(); + + // Generate a token randomly + let token = Alphanumeric.sample_string(&mut state.rng(), 32); + + // Start a compat SSO login flow + let login = repo + .compat_sso_login() + .add( + &mut state.rng(), + &state.clock, + token.clone(), + "http://example.com/".parse().unwrap(), + ) + .await + .unwrap(); + + // Advance the flow by fulfilling it with a browser session + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, user, None) + .await + .unwrap(); + let _login = repo + .compat_sso_login() + .fulfill(&state.clock, login, &browser_session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + token + } +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/login_sso_complete.rs b/matrix-authentication-service/crates/handlers/src/compat/login_sso_complete.rs new file mode 100644 index 00000000..df059cd3 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/login_sso_complete.rs @@ -0,0 +1,303 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use anyhow::Context; +use axum::{ + extract::{Form, Path, State}, + response::{Html, IntoResponse, Redirect, Response}, +}; +use axum_extra::{TypedHeader, extract::Query}; +use chrono::Duration; +use hyper::StatusCode; +use mas_axum_utils::{ + InternalError, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, Clock, MatrixUser}; +use mas_matrix::HomeserverConnection; +use mas_policy::{Policy, model::CompatLogin}; +use mas_router::{CompatLoginSsoAction, UrlBuilder}; +use mas_storage::{BoxRepository, RepositoryAccess, compat::CompatSsoLoginRepository}; +use mas_templates::{ + CompatLoginPolicyViolationContext, CompatSsoContext, ErrorContext, TemplateContext, Templates, +}; +use serde::Deserialize; +use ulid::Ulid; + +use crate::{ + BoundActivityTracker, PreferredLanguage, + session::{SessionOrFallback, count_user_sessions_for_limiting, load_session_or_fallback}, +}; + +#[derive(Debug, Deserialize)] +pub struct Params { + action: Option, +} + +#[tracing::instrument( + name = "handlers.compat.login_sso_complete.get", + fields(compat_sso_login.id = %id), + skip_all, +)] +pub async fn get( + PreferredLanguage(locale): PreferredLanguage, + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + State(templates): State, + State(url_builder): State, + State(homeserver): State>, + mut policy: Policy, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + Path(id): Path, + Query(params): Query, +) -> Result { + let user_agent = user_agent.map(|ua| ua.to_string()); + + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. 
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let Some(session) = maybe_session else { + // If there is no session, redirect to the login or register screen + let url = match params.action { + Some(CompatLoginSsoAction::Register) => { + url_builder.redirect(&mas_router::Register::and_continue_compat_sso_login(id)) + } + Some(CompatLoginSsoAction::Login | CompatLoginSsoAction::Unknown) | None => { + url_builder.redirect(&mas_router::Login::and_continue_compat_sso_login(id)) + } + }; + + return Ok((cookie_jar, url).into_response()); + }; + + let login = repo + .compat_sso_login() + .lookup(id) + .await? + .context("Could not find compat SSO login") + .map_err(InternalError::from_anyhow)?; + + // Bail out if that login session is more than 30min old + if clock.now() > login.created_at + Duration::microseconds(30 * 60 * 1000 * 1000) { + let ctx = ErrorContext::new() + .with_code("compat_sso_login_expired") + .with_description("This login session expired.".to_owned()) + .with_language(&locale); + + let content = templates.render_error(&ctx)?; + return Ok((cookie_jar, Html(content)).into_response()); + } + + let session_counts = count_user_sessions_for_limiting(&mut repo, &session.user).await?; + + // We can close the repository early, we don't need it at this point + repo.save().await?; + + let res = policy + .evaluate_compat_login(mas_policy::CompatLoginInput { + user: &session.user, + login: CompatLogin::Sso { + redirect_uri: login.redirect_uri.to_string(), + }, + // We don't know if there's going to be a replacement until we received the device ID, + // which happens too late. 
+ session_replaced: false, + session_counts, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + if !res.valid() { + let ctx = CompatLoginPolicyViolationContext::for_violations(res.violations) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_compat_login_policy_violation(&ctx)?; + + return Ok((StatusCode::FORBIDDEN, cookie_jar, Html(content)).into_response()); + } + + // Fetch information about the user. This is purely cosmetic, so we let it + // fail and put a 1s timeout to it in case we fail to query it + // XXX: we're likely to need this in other places + let localpart = &session.user.username; + let display_name = match tokio::time::timeout( + std::time::Duration::from_secs(1), + homeserver.query_user(localpart), + ) + .await + { + Ok(Ok(user)) => user.displayname, + Ok(Err(err)) => { + tracing::warn!( + error = &*err as &dyn std::error::Error, + localpart, + "Failed to query user" + ); + None + } + Err(_) => { + tracing::warn!(localpart, "Timed out while querying user"); + None + } + }; + + let matrix_user = MatrixUser { + mxid: homeserver.mxid(localpart), + display_name, + }; + + let ctx = CompatSsoContext::new(login, matrix_user) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_sso_login(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} + +#[tracing::instrument( + name = "handlers.compat.login_sso_complete.post", + fields(compat_sso_login.id = %id), + skip_all, +)] +pub async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + mut policy: Policy, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + Path(id): Path, + Query(params): Query, + Form(form): Form>, +) -> Result { + 
let user_agent = user_agent.map(|ua| ua.to_string()); + + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. + } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + cookie_jar.verify_form(&clock, form)?; + + let Some(session) = maybe_session else { + // If there is no session, redirect to the login or register screen + let url = match params.action { + Some(CompatLoginSsoAction::Register) => { + url_builder.redirect(&mas_router::Register::and_continue_compat_sso_login(id)) + } + Some(CompatLoginSsoAction::Login | CompatLoginSsoAction::Unknown) | None => { + url_builder.redirect(&mas_router::Login::and_continue_compat_sso_login(id)) + } + }; + + return Ok((cookie_jar, url).into_response()); + }; + + let login = repo + .compat_sso_login() + .lookup(id) + .await? + .context("Could not find compat SSO login") + .map_err(InternalError::from_anyhow)?; + + // Bail out if that login session isn't pending, or is more than 30min old + if !login.is_pending() + || clock.now() > login.created_at + Duration::microseconds(30 * 60 * 1000 * 1000) + { + let ctx = ErrorContext::new() + .with_code("compat_sso_login_expired") + .with_description("This login session expired.".to_owned()) + .with_language(&locale); + + let content = templates.render_error(&ctx)?; + return Ok((cookie_jar, Html(content)).into_response()); + } + + let redirect_uri = { + let mut redirect_uri = login.redirect_uri.clone(); + redirect_uri + .query_pairs_mut() + .append_pair("loginToken", &login.login_token); + redirect_uri + }; + + let session_counts = count_user_sessions_for_limiting(&mut repo, &session.user).await?; + + let res = policy + .evaluate_compat_login(mas_policy::CompatLoginInput { + user: &session.user, + login: CompatLogin::Sso { + redirect_uri: login.redirect_uri.to_string(), 
+ }, + session_counts, + // We don't know if there's going to be a replacement until we received the device ID, + // which happens too late. + session_replaced: false, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + + if !res.valid() { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = CompatLoginPolicyViolationContext::for_violations(res.violations) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_compat_login_policy_violation(&ctx)?; + + return Ok((StatusCode::FORBIDDEN, cookie_jar, Html(content)).into_response()); + } + + // Note that if the login is not Pending, + // this fails and aborts the transaction. + repo.compat_sso_login() + .fulfill(&clock, login, &session) + .await?; + + repo.save().await?; + + Ok((cookie_jar, Redirect::to(redirect_uri.as_str())).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/login_sso_redirect.rs b/matrix-authentication-service/crates/handlers/src/compat/login_sso_redirect.rs new file mode 100644 index 00000000..8edb868f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/login_sso_redirect.rs @@ -0,0 +1,156 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{extract::State, response::IntoResponse}; +use axum_extra::extract::Query; +use hyper::StatusCode; +use mas_axum_utils::{GenericError, InternalError}; +use mas_data_model::{BoxClock, BoxRng}; +use mas_router::{CompatLoginSsoAction, CompatLoginSsoComplete, UrlBuilder}; +use mas_storage::{BoxRepository, compat::CompatSsoLoginRepository}; +use rand::distributions::{Alphanumeric, DistString}; +use serde::Deserialize; +use thiserror::Error; +use url::Url; + +use crate::impl_from_error_for_route; + +#[derive(Debug, Deserialize)] +pub struct Params { + #[serde(rename = "redirectUrl")] + redirect_url: Option, + + action: Option, + + #[serde(rename = "org.matrix.msc3824.action")] + unstable_action: Option, +} + +impl Params { + fn action(&self) -> Option { + self.action + .filter(CompatLoginSsoAction::is_known) + .or(self.unstable_action.filter(CompatLoginSsoAction::is_known)) + } +} + +#[derive(Debug, Error)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Missing redirectUrl")] + MissingRedirectUrl, + + #[error("invalid redirectUrl")] + InvalidRedirectUrl, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + match self { + Self::Internal(e) => InternalError::new(e).into_response(), + Self::MissingRedirectUrl | Self::InvalidRedirectUrl => { + GenericError::new(StatusCode::BAD_REQUEST, self).into_response() + } + } + } +} + +#[tracing::instrument(name = "handlers.compat.login_sso_redirect.get", skip_all)] +pub async fn get( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + State(url_builder): State, + Query(params): Query, +) -> Result { + let action = params.action(); + + // Check the redirectUrl parameter + let redirect_url = params.redirect_url.ok_or(RouteError::MissingRedirectUrl)?; + let redirect_url = Url::parse(&redirect_url).map_err(|_| RouteError::InvalidRedirectUrl)?; + + // Do not allow URLs with 
username or passwords in them + if !redirect_url.username().is_empty() || redirect_url.password().is_some() { + return Err(RouteError::InvalidRedirectUrl); + } + + // On the http/https scheme, verify the URL has a host + if matches!(redirect_url.scheme(), "http" | "https") && !redirect_url.has_host() { + return Err(RouteError::InvalidRedirectUrl); + } + + let token = Alphanumeric.sample_string(&mut rng, 32); + let login = repo + .compat_sso_login() + .add(&mut rng, &clock, token, redirect_url) + .await?; + + repo.save().await?; + + Ok(url_builder.absolute_redirect(&CompatLoginSsoComplete::new(login.id, action))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unstable_action_fallback(pool: PgPool) { + let state: TestState = TestState::from_pool(pool).await.unwrap(); + + let request = Request::get( + "/_matrix/client/v3/login/sso/redirect?\ + redirectUrl=http://example.com/\ + &org.matrix.msc3824.action=register", + ) + .empty(); + + let response = state.request(request).await; + + response.assert_status(StatusCode::SEE_OTHER); + + let location = response + .headers() + .get("Location") + .unwrap() + .to_str() + .unwrap(); + assert!(location.contains("org.matrix.msc3824.action=register")); + assert!(location.contains("action=register")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unknown_action(pool: PgPool) { + let state: TestState = TestState::from_pool(pool).await.unwrap(); + + let request = Request::get( + "/_matrix/client/v3/login/sso/redirect?\ + redirectUrl=http://example.com/\ + &org.matrix.msc3824.action=undefinedaction", + ) + .empty(); + + let response = state.request(request).await; + + response.assert_status(StatusCode::SEE_OTHER); + + let location = response + .headers() + .get("Location") + .unwrap() + .to_str() + .unwrap(); + 
assert!(!location.contains("org.matrix.msc3824.action")); + assert!(!location.contains("action")); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/logout.rs b/matrix-authentication-service/crates/handlers/src/compat/logout.rs new file mode 100644 index 00000000..4642cc54 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/logout.rs @@ -0,0 +1,132 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::LazyLock; + +use axum::{Json, response::IntoResponse}; +use axum_extra::typed_header::TypedHeader; +use headers::{Authorization, authorization::Bearer}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxClock, BoxRng, Clock, TokenType}; +use mas_storage::{ + BoxRepository, RepositoryAccess, + compat::{CompatAccessTokenRepository, CompatSessionRepository}, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use thiserror::Error; + +use super::MatrixError; +use crate::{BoundActivityTracker, METER, impl_from_error_for_route}; + +static LOGOUT_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.compat.logout_request") + .with_description("How many compatibility logout request have happened") + .with_unit("{request}") + .build() +}); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Error, Debug)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Missing access token")] + MissingAuthorization, + + #[error("Invalid token format")] + TokenFormat(#[from] mas_data_model::TokenFormatError), + + #[error("Invalid access token")] + InvalidAuthorization, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn 
into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + LOGOUT_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + let response = match self { + Self::Internal(_) => MatrixError { + errcode: "M_UNKNOWN", + error: "Internal error", + status: StatusCode::INTERNAL_SERVER_ERROR, + }, + Self::MissingAuthorization => MatrixError { + errcode: "M_MISSING_TOKEN", + error: "Missing access token", + status: StatusCode::UNAUTHORIZED, + }, + Self::InvalidAuthorization | Self::TokenFormat(_) => MatrixError { + errcode: "M_UNKNOWN_TOKEN", + error: "Invalid access token", + status: StatusCode::UNAUTHORIZED, + }, + }; + + (sentry_event_id, response).into_response() + } +} + +#[tracing::instrument(name = "handlers.compat.logout.post", skip_all)] +pub(crate) async fn post( + clock: BoxClock, + mut rng: BoxRng, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + maybe_authorization: Option>>, +) -> Result { + let TypedHeader(authorization) = maybe_authorization.ok_or(RouteError::MissingAuthorization)?; + + let token = authorization.token(); + let token_type = TokenType::check(token)?; + + if token_type != TokenType::CompatAccessToken { + return Err(RouteError::InvalidAuthorization); + } + + let token = repo + .compat_access_token() + .find_by_token(token) + .await? + .filter(|t| t.is_valid(clock.now())) + .ok_or(RouteError::InvalidAuthorization)?; + + let session = repo + .compat_session() + .lookup(token.session_id) + .await? + .filter(|s| s.is_valid()) + .ok_or(RouteError::InvalidAuthorization)?; + + activity_tracker + .record_compat_session(&clock, &session) + .await; + + let user = repo + .user() + .lookup(session.user_id) + .await? 
+ // XXX: this is probably not the right error + .ok_or(RouteError::InvalidAuthorization)?; + + // Schedule a job to sync the devices of the user with the homeserver + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + + repo.compat_session().finish(&clock, session).await?; + + repo.save().await?; + + LOGOUT_COUNTER.add(1, &[KeyValue::new(RESULT, "success")]); + + Ok(Json(serde_json::json!({}))) +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/logout_all.rs b/matrix-authentication-service/crates/handlers/src/compat/logout_all.rs new file mode 100644 index 00000000..f2ec10a8 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/logout_all.rs @@ -0,0 +1,201 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::LazyLock; + +use axum::{Json, response::IntoResponse}; +use axum_extra::typed_header::TypedHeader; +use headers::{Authorization, authorization::Bearer}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxClock, BoxRng, Clock, TokenType}; +use mas_storage::{ + BoxRepository, RepositoryAccess, + compat::{CompatAccessTokenRepository, CompatSessionFilter, CompatSessionRepository}, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use serde::Deserialize; +use thiserror::Error; +use tracing::info; +use ulid::Ulid; + +use super::{MatrixError, MatrixJsonBody}; +use crate::{BoundActivityTracker, METER, impl_from_error_for_route}; + +static LOGOUT_ALL_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.compat.logout_all_request") + .with_description( + "How many request to the /logout/all compatibility endpoint have happened", + ) + .with_unit("{request}") + .build() +}); +const RESULT: Key = Key::from_static_str("result"); 
+ +#[derive(Error, Debug)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Can't load session {0}")] + CantLoadSession(Ulid), + + #[error("Can't load user {0}")] + CantLoadUser(Ulid), + + #[error("Token {0} has expired")] + InvalidToken(Ulid), + + #[error("Session {0} has been revoked")] + InvalidSession(Ulid), + + #[error("User {0} is locked or deactivated")] + InvalidUser(Ulid), + + #[error("/logout/all is not supported")] + NotSupported, + + #[error("Missing access token")] + MissingAuthorization, + + #[error("Invalid token format")] + TokenFormat(#[from] mas_data_model::TokenFormatError), + + #[error("Access token is not a compatibility access token")] + NotACompatToken, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!( + self, + Self::Internal(_) | Self::CantLoadSession(_) | Self::CantLoadUser(_) + ); + + // We track separately if the endpoint was called without the custom + // parameter, so that we know if clients are using this endpoint in the + // wild + if matches!(self, Self::NotSupported) { + LOGOUT_ALL_COUNTER.add(1, &[KeyValue::new(RESULT, "not_supported")]); + } else { + LOGOUT_ALL_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + } + + let response = match self { + Self::Internal(_) | Self::CantLoadSession(_) | Self::CantLoadUser(_) => MatrixError { + errcode: "M_UNKNOWN", + error: "Internal error", + status: StatusCode::INTERNAL_SERVER_ERROR, + }, + Self::MissingAuthorization => MatrixError { + errcode: "M_MISSING_TOKEN", + error: "Missing access token", + status: StatusCode::UNAUTHORIZED, + }, + Self::InvalidUser(_) + | Self::InvalidSession(_) + | Self::InvalidToken(_) + | Self::NotACompatToken + | Self::TokenFormat(_) => MatrixError { + errcode: "M_UNKNOWN_TOKEN", + error: "Invalid access token", + status: StatusCode::UNAUTHORIZED, + }, + Self::NotSupported => MatrixError 
{ + errcode: "M_UNRECOGNIZED", + error: "The /logout/all endpoint is not supported by this deployment", + status: StatusCode::NOT_FOUND, + }, + }; + + (sentry_event_id, response).into_response() + } +} + +#[derive(Deserialize, Default)] +pub(crate) struct RequestBody { + #[serde(rename = "io.element.only_compat_is_fine", default)] + only_compat_is_fine: bool, +} + +#[tracing::instrument(name = "handlers.compat.logout_all.post", skip_all)] +pub(crate) async fn post( + clock: BoxClock, + mut rng: BoxRng, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + maybe_authorization: Option>>, + input: Option>, +) -> Result { + let MatrixJsonBody(input) = input.unwrap_or_default(); + let TypedHeader(authorization) = maybe_authorization.ok_or(RouteError::MissingAuthorization)?; + + let token = authorization.token(); + let token_type = TokenType::check(token)?; + + if token_type != TokenType::CompatAccessToken { + return Err(RouteError::NotACompatToken); + } + + let token = repo + .compat_access_token() + .find_by_token(token) + .await? + .ok_or(RouteError::NotACompatToken)?; + + if !token.is_valid(clock.now()) { + return Err(RouteError::InvalidToken(token.id)); + } + + let session = repo + .compat_session() + .lookup(token.session_id) + .await? + .ok_or(RouteError::CantLoadSession(token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidSession(session.id)); + } + + activity_tracker + .record_compat_session(&clock, &session) + .await; + + let user = repo + .user() + .lookup(session.user_id) + .await? 
+ .ok_or(RouteError::CantLoadUser(session.user_id))?; + + if !user.is_valid() { + return Err(RouteError::InvalidUser(session.user_id)); + } + + if !input.only_compat_is_fine { + return Err(RouteError::NotSupported); + } + + let filter = CompatSessionFilter::new().for_user(&user).active_only(); + let affected_sessions = repo.compat_session().finish_bulk(&clock, filter).await?; + info!( + "Logged out {affected_sessions} sessions for user {user_id}", + user_id = user.id + ); + + // Schedule a job to sync the devices of the user with the homeserver + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + + repo.save().await?; + + LOGOUT_ALL_COUNTER.add(1, &[KeyValue::new(RESULT, "success")]); + + Ok(Json(serde_json::json!({}))) +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/mod.rs b/matrix-authentication-service/crates/handlers/src/compat/mod.rs new file mode 100644 index 00000000..dd5600af --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/mod.rs @@ -0,0 +1,172 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{ + Json, + body::Bytes, + extract::{ + Request, + rejection::{BytesRejection, FailedToBufferBody}, + }, + response::IntoResponse, +}; +use hyper::{StatusCode, header}; +use mas_axum_utils::record_error; +use serde::{Serialize, de::DeserializeOwned}; +use thiserror::Error; + +pub(crate) mod login; +pub(crate) mod login_sso_complete; +pub(crate) mod login_sso_redirect; +pub(crate) mod logout; +pub(crate) mod logout_all; +pub(crate) mod refresh; + +#[cfg(test)] +mod tests; + +#[derive(Debug, Serialize)] +struct MatrixError { + errcode: &'static str, + error: &'static str, + #[serde(skip)] + status: StatusCode, +} + +impl IntoResponse for MatrixError { + fn into_response(self) -> axum::response::Response { + (self.status, Json(self)).into_response() + } +} + +#[derive(Debug, Clone, Copy, Default)] +#[must_use] +pub struct MatrixJsonBody(pub T); + +#[derive(Debug, Error)] +pub enum MatrixJsonBodyRejection { + #[error("Invalid Content-Type header: expected application/json")] + InvalidContentType, + + #[error("Invalid Content-Type header: expected application/json, got {0}")] + ContentTypeNotJson(mime::Mime), + + #[error("Failed to read request body")] + BytesRejection(#[from] BytesRejection), + + #[error("Invalid JSON document")] + Json(#[from] serde_json::Error), +} + +impl IntoResponse for MatrixJsonBodyRejection { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, !); + let response = match self { + Self::InvalidContentType | Self::ContentTypeNotJson(_) => MatrixError { + errcode: "M_NOT_JSON", + error: "Invalid Content-Type header: expected application/json", + status: StatusCode::BAD_REQUEST, + }, + + Self::BytesRejection(BytesRejection::FailedToBufferBody( + FailedToBufferBody::LengthLimitError(_), + )) => MatrixError { + errcode: "M_TOO_LARGE", + error: "Request body too large", + status: StatusCode::PAYLOAD_TOO_LARGE, + }, + + Self::BytesRejection(BytesRejection::FailedToBufferBody( + 
FailedToBufferBody::UnknownBodyError(_), + )) => MatrixError { + errcode: "M_UNKNOWN", + error: "Failed to read request body", + status: StatusCode::BAD_REQUEST, + }, + + Self::BytesRejection(_) => MatrixError { + errcode: "M_UNKNOWN", + error: "Unknown error while reading request body", + status: StatusCode::BAD_REQUEST, + }, + + Self::Json(err) if err.is_data() => MatrixError { + errcode: "M_BAD_JSON", + error: "JSON fields are not valid", + status: StatusCode::BAD_REQUEST, + }, + + Self::Json(_) => MatrixError { + errcode: "M_NOT_JSON", + error: "Body is not a valid JSON document", + status: StatusCode::BAD_REQUEST, + }, + }; + + (sentry_event_id, response).into_response() + } +} + +impl axum::extract::FromRequest for MatrixJsonBody +where + T: DeserializeOwned, + S: Send + Sync, +{ + type Rejection = MatrixJsonBodyRejection; + + async fn from_request(req: Request, state: &S) -> Result { + // Matrix spec says it's optional to send a Content-Type header, so we + // only check it if it's present + if let Some(content_type) = req.headers().get(header::CONTENT_TYPE) { + let Ok(content_type) = content_type.to_str() else { + return Err(MatrixJsonBodyRejection::InvalidContentType); + }; + + let Ok(mime) = content_type.parse::() else { + return Err(MatrixJsonBodyRejection::InvalidContentType); + }; + + let is_json_content_type = mime.type_() == "application" + && (mime.subtype() == "json" || mime.suffix().is_some_and(|name| name == "json")); + + if !is_json_content_type { + return Err(MatrixJsonBodyRejection::ContentTypeNotJson(mime)); + } + } + + let bytes = Bytes::from_request(req, state).await?; + + let value: T = serde_json::from_slice(&bytes)?; + + Ok(Self(value)) + } +} + +impl axum::extract::OptionalFromRequest for MatrixJsonBody +where + T: DeserializeOwned, + S: Send + Sync, +{ + type Rejection = MatrixJsonBodyRejection; + + async fn from_request(req: Request, state: &S) -> Result, Self::Rejection> { + if req.headers().contains_key(header::CONTENT_TYPE) { + // 
If there is a Content-Type header, handle it as normal + let result = >::from_request(req, state).await?; + return Ok(Some(result)); + } + + // Else, we poke at the body, and deserialize it only if it's JSON + let bytes = >::from_request(req, state).await?; + if bytes.is_empty() { + return Ok(None); + } + + let value: T = serde_json::from_slice(&bytes)?; + + Ok(Some(Self(value))) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/refresh.rs b/matrix-authentication-service/crates/handlers/src/compat/refresh.rs new file mode 100644 index 00000000..dabdaefe --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/refresh.rs @@ -0,0 +1,175 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{Json, extract::State, response::IntoResponse}; +use chrono::Duration; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxClock, BoxRng, Clock, SiteConfig, TokenFormatError, TokenType}; +use mas_storage::{ + BoxRepository, + compat::{CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository}, +}; +use serde::{Deserialize, Serialize}; +use serde_with::{DurationMilliSeconds, serde_as}; +use thiserror::Error; +use ulid::Ulid; + +use super::MatrixError; +use crate::{BoundActivityTracker, impl_from_error_for_route}; + +#[derive(Debug, Deserialize)] +pub struct RequestBody { + refresh_token: String, +} + +#[derive(Debug, Error)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("invalid token")] + InvalidToken(#[from] TokenFormatError), + + #[error("unknown token")] + UnknownToken, + + #[error("invalid token type {0}, expected a compat refresh token")] + InvalidTokenType(TokenType), + + #[error("refresh token already consumed {0}")] + 
RefreshTokenConsumed(Ulid), + + #[error("invalid compat session {0}")] + InvalidSession(Ulid), + + #[error("unknown comapt session {0}")] + UnknownSession(Ulid), +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_) | Self::UnknownSession(_)); + let response = match self { + Self::Internal(_) | Self::UnknownSession(_) => MatrixError { + errcode: "M_UNKNOWN", + error: "Internal error", + status: StatusCode::INTERNAL_SERVER_ERROR, + }, + Self::InvalidToken(_) + | Self::UnknownToken + | Self::InvalidTokenType(_) + | Self::InvalidSession(_) + | Self::RefreshTokenConsumed(_) => MatrixError { + errcode: "M_UNKNOWN_TOKEN", + error: "Invalid refresh token", + status: StatusCode::UNAUTHORIZED, + }, + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +#[serde_as] +#[derive(Debug, Serialize)] +pub struct ResponseBody { + access_token: String, + refresh_token: String, + #[serde_as(as = "DurationMilliSeconds")] + expires_in_ms: Duration, +} + +#[tracing::instrument(name = "handlers.compat.refresh.post", skip_all)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + State(site_config): State, + Json(input): Json, +) -> Result { + let token_type = TokenType::check(&input.refresh_token)?; + + if token_type != TokenType::CompatRefreshToken { + return Err(RouteError::InvalidTokenType(token_type)); + } + + let refresh_token = repo + .compat_refresh_token() + .find_by_token(&input.refresh_token) + .await? + .ok_or(RouteError::UnknownToken)?; + + if !refresh_token.is_valid() { + return Err(RouteError::RefreshTokenConsumed(refresh_token.id)); + } + + let session = repo + .compat_session() + .lookup(refresh_token.session_id) + .await? 
+ .ok_or(RouteError::UnknownSession(refresh_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidSession(refresh_token.session_id)); + } + + activity_tracker + .record_compat_session(&clock, &session) + .await; + + let access_token = repo + .compat_access_token() + .lookup(refresh_token.access_token_id) + .await? + .filter(|t| t.is_valid(clock.now())); + + let new_refresh_token_str = TokenType::CompatRefreshToken.generate(&mut rng); + let new_access_token_str = TokenType::CompatAccessToken.generate(&mut rng); + + let expires_in = site_config.compat_token_ttl; + let new_access_token = repo + .compat_access_token() + .add( + &mut rng, + &clock, + &session, + new_access_token_str, + Some(expires_in), + ) + .await?; + let new_refresh_token = repo + .compat_refresh_token() + .add( + &mut rng, + &clock, + &session, + &new_access_token, + new_refresh_token_str, + ) + .await?; + + repo.compat_refresh_token() + .consume_and_replace(&clock, refresh_token, &new_refresh_token) + .await?; + + if let Some(access_token) = access_token { + repo.compat_access_token() + .expire(&clock, access_token) + .await?; + } + + repo.save().await?; + + Ok(Json(ResponseBody { + access_token: new_access_token.token, + refresh_token: new_refresh_token.token, + expires_in_ms: expires_in, + })) +} diff --git a/matrix-authentication-service/crates/handlers/src/compat/tests.rs b/matrix-authentication-service/crates/handlers/src/compat/tests.rs new file mode 100644 index 00000000..cb6b76ab --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/compat/tests.rs @@ -0,0 +1,233 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::http::{Request, StatusCode}; +use mas_matrix::{HomeserverConnection, ProvisionRequest}; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroizing; + +use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + +#[derive(Debug, Serialize)] +#[serde(tag = "type")] +enum LoginCredentials { + #[serde(rename = "m.login.password")] + Password { + identifier: LoginIdentifier, + password: String, + }, +} + +#[derive(Debug, Serialize)] +#[serde(tag = "type")] +enum LoginIdentifier { + #[serde(rename = "m.id.user")] + User { user: String }, +} + +#[derive(Debug, Serialize)] +struct LoginRequest { + #[serde(flatten)] + credentials: LoginCredentials, + #[serde(default)] + refresh_token: bool, +} + +#[derive(Debug, Deserialize)] +struct LoginResponse { + #[allow(dead_code)] + access_token: String, + #[allow(dead_code)] + user_id: String, + #[allow(dead_code)] + device_id: Option, + refresh_token: Option, +} + +#[derive(Debug, Serialize)] +struct RefreshRequest { + refresh_token: String, +} + +#[derive(Debug, Deserialize, PartialEq, Eq)] +struct RefreshResponse { + access_token: String, + refresh_token: String, + expires_in_ms: i64, +} + +/// Test using a compatibility refresh token. 
// Integration tests for the Matrix compatibility `/refresh` endpoint.
// These run against a real Postgres database provided by `sqlx::test`.
// NOTE(review): the exact access/refresh token literals asserted below rely on
// TestState using a deterministic clock and seeded RNG — confirm before
// changing the order or number of requests, as any extra RNG draw shifts them.
#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
async fn test_compat_refresh(pool: sqlx::PgPool) {
    setup();
    let state = TestState::from_pool(pool).await.unwrap();

    // Create a user
    create_test_user(&state, "testuser").await;

    // Login to get initial tokens
    let login_request = Request::post("/_matrix/client/v3/login").json(&LoginRequest {
        credentials: LoginCredentials::Password {
            identifier: LoginIdentifier::User {
                user: "testuser".to_owned(),
            },
            password: "password".to_owned(),
        },
        refresh_token: true,
    });

    let login_response = state.request(login_request).await;
    login_response.assert_status(StatusCode::OK);

    let login_response: LoginResponse = login_response.json();
    let initial_refresh_token = login_response
        .refresh_token
        .expect("Login should return a refresh token");

    // First refresh
    let refresh_request = Request::post("/_matrix/client/v3/refresh").json(&RefreshRequest {
        refresh_token: initial_refresh_token.clone(),
    });

    let first_refresh_response = state.request(refresh_request).await;
    first_refresh_response.assert_status(StatusCode::OK);

    let first_refresh_response: RefreshResponse = first_refresh_response.json();
    // Keep a copy of the rotated refresh token before the exhaustive equality
    // assertion below consumes the response struct by comparison.
    let first_new_refresh_token = first_refresh_response.refresh_token.clone();

    assert_eq!(
        first_refresh_response,
        RefreshResponse {
            access_token: "mct_fNbm5KAQovodfVQz7IvDc44woP66fR_fsaiD1".to_owned(),
            refresh_token: "mcr_42oTpLoieH5IecxG6gZXyvelQWW9Xq_a8g5N3".to_owned(),
            expires_in_ms: 300_000
        }
    );

    // Use the token from the /refresh response to /refresh again,
    // proving that it works.
    // This is a regression test: we were previously consuming the refresh token
    // before it was returned from /refresh.
    let second_refresh_request =
        Request::post("/_matrix/client/v3/refresh").json(&RefreshRequest {
            refresh_token: first_new_refresh_token.clone(),
        });

    let second_refresh_response = state.request(second_refresh_request).await;
    second_refresh_response.assert_status(StatusCode::OK);

    let second_refresh_response: RefreshResponse = second_refresh_response.json();

    assert_eq!(
        second_refresh_response,
        RefreshResponse {
            access_token: "mct_Wc6Hx4l9DGzqGtgLoYqtrtBUBcWlE4_ZFyTp2".to_owned(),
            refresh_token: "mcr_Yp7FM44zJN5qePGMLvvMXC4Ds1A3lC_0YcYCM".to_owned(),
            expires_in_ms: 300_000
        }
    );
}

// A refresh token the server never issued must be rejected with 401.
#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
async fn test_refresh_with_invalid_token(pool: sqlx::PgPool) {
    setup();
    let state = TestState::from_pool(pool).await.unwrap();

    let refresh_request = RefreshRequest {
        refresh_token: "invalid_token".to_owned(),
    };

    let refresh_request = Request::post("/_matrix/client/v3/refresh").json(&refresh_request);

    let response = state.request(refresh_request).await;
    response.assert_status(StatusCode::UNAUTHORIZED);
}

// Refresh tokens are single-use: replaying one after it has been exchanged
// must be rejected with 401.
#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
async fn test_refresh_with_consumed_token(pool: sqlx::PgPool) {
    setup();
    let state = TestState::from_pool(pool).await.unwrap();

    // Create a user and login
    create_test_user(&state, "testuser").await;

    let login_request = LoginRequest {
        credentials: LoginCredentials::Password {
            identifier: LoginIdentifier::User {
                user: "testuser".to_owned(),
            },
            password: "password".to_owned(),
        },
        refresh_token: true,
    };

    let login_request = Request::post("/_matrix/client/v3/login").json(&login_request);

    let login_response = state.request(login_request).await;
    login_response.assert_status(StatusCode::OK);

    let login_response: LoginResponse = login_response.json();
    let refresh_token = login_response
        .refresh_token
        .expect("Login should return a refresh token");

    let refresh_request = RefreshRequest {
        refresh_token: refresh_token.clone(),
    };

    // Use the refresh token once
    let first_refresh_request = Request::post("/_matrix/client/v3/refresh").json(&refresh_request);
    let first_refresh_response = state.request(first_refresh_request).await;
    first_refresh_response.assert_status(StatusCode::OK);

    let _first_refresh_response: RefreshResponse = first_refresh_response.json();

    // Try to use the same refresh token again - should fail because it's consumed
    let second_refresh_request = Request::post("/_matrix/client/v3/refresh").json(&refresh_request);

    let second_refresh_response = state.request(second_refresh_request).await;
    second_refresh_response.assert_status(StatusCode::UNAUTHORIZED);
}

// Test fixture: creates a user with the fixed password "password", stores the
// hashed credential, and provisions the user on the (mock) homeserver so that
// compat login can succeed. Returns the created user record.
async fn create_test_user(state: &TestState, username: &str) -> mas_data_model::User {
    let mut repo = state.repository().await.unwrap();
    let mut rng = state.rng();

    let user = repo
        .user()
        .add(&mut rng, &state.clock, username.to_owned())
        .await
        .unwrap();

    // Zeroizing wipes the plaintext password from memory when dropped.
    let password = Zeroizing::new("password".to_owned());
    let (version, hashed_password) = state
        .password_manager
        .hash(&mut rng, password)
        .await
        .unwrap();

    repo.user_password()
        .add(
            &mut rng,
            &state.clock,
            &user,
            version,
            hashed_password,
            // NOTE(review): last argument is presumably the previous password
            // being upgraded from — None here since this is a fresh credential.
            None,
        )
        .await
        .unwrap();

    // Provision the user on the homeserver
    state
        .homeserver_connection
        .provision_user(&ProvisionRequest::new(&user.username, &user.sub))
        .await
        .unwrap();

    repo.save().await.unwrap();

    user
}
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::module_name_repetitions)] + +use std::{net::IpAddr, ops::Deref, sync::Arc}; + +use async_graphql::{ + EmptySubscription, InputObject, + extensions::Tracing, + http::{GraphQLPlaygroundConfig, MultipartOptions, playground_source}, +}; +use axum::{ + Extension, Json, + body::Body, + extract::{RawQuery, State as AxumState}, + http::StatusCode, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::typed_header::TypedHeader; +use chrono::{DateTime, Utc}; +use futures_util::TryStreamExt; +use headers::{Authorization, ContentType, HeaderValue, authorization::Bearer}; +use hyper::header::CACHE_CONTROL; +use mas_axum_utils::{ + InternalError, SessionInfo, SessionInfoExt, cookies::CookieJar, sentry::SentryEventID, +}; +use mas_data_model::{ + BoxClock, BoxRng, BrowserSession, Clock, Session, SiteConfig, SystemClock, User, +}; +use mas_matrix::HomeserverConnection; +use mas_policy::{InstantiateError, Policy, PolicyFactory}; +use mas_router::UrlBuilder; +use mas_storage::{BoxRepository, BoxRepositoryFactory, RepositoryError}; +use opentelemetry_semantic_conventions::trace::{GRAPHQL_DOCUMENT, GRAPHQL_OPERATION_NAME}; +use rand::{SeedableRng, thread_rng}; +use rand_chacha::ChaChaRng; +use state::has_session_ended; +use tracing::{Instrument, info_span}; +use ulid::Ulid; + +mod model; +mod mutations; +mod query; +mod state; + +pub use self::state::{BoxState, State}; +use self::{ + model::{CreationEvent, Node}, + mutations::Mutation, + query::Query, +}; +use crate::{ + BoundActivityTracker, Limiter, RequesterFingerprint, impl_from_error_for_route, + passwords::PasswordManager, +}; + +#[cfg(test)] +mod tests; + +/// Extra parameters we get from the listener configuration, because they are +/// per-listener options. We pass them through request extensions. 
+#[derive(Debug, Clone)] +pub struct ExtraRouterParameters { + pub undocumented_oauth2_access: bool, +} + +struct GraphQLState { + repository_factory: BoxRepositoryFactory, + homeserver_connection: Arc, + policy_factory: Arc, + site_config: SiteConfig, + password_manager: PasswordManager, + url_builder: UrlBuilder, + limiter: Limiter, +} + +#[async_trait::async_trait] +impl state::State for GraphQLState { + async fn repository(&self) -> Result { + self.repository_factory.create().await + } + + async fn policy(&self) -> Result { + self.policy_factory.instantiate().await + } + + fn password_manager(&self) -> PasswordManager { + self.password_manager.clone() + } + + fn site_config(&self) -> &SiteConfig { + &self.site_config + } + + fn homeserver_connection(&self) -> &dyn HomeserverConnection { + self.homeserver_connection.as_ref() + } + + fn url_builder(&self) -> &UrlBuilder { + &self.url_builder + } + + fn limiter(&self) -> &Limiter { + &self.limiter + } + + fn clock(&self) -> BoxClock { + let clock = SystemClock::default(); + Box::new(clock) + } + + fn rng(&self) -> BoxRng { + #[allow(clippy::disallowed_methods)] + let rng = thread_rng(); + + let rng = ChaChaRng::from_rng(rng).expect("Failed to seed rng"); + Box::new(rng) + } +} + +#[must_use] +pub fn schema( + repository_factory: BoxRepositoryFactory, + policy_factory: &Arc, + homeserver_connection: impl HomeserverConnection + 'static, + site_config: SiteConfig, + password_manager: PasswordManager, + url_builder: UrlBuilder, + limiter: Limiter, +) -> Schema { + let state = GraphQLState { + repository_factory, + policy_factory: Arc::clone(policy_factory), + homeserver_connection: Arc::new(homeserver_connection), + site_config, + password_manager, + url_builder, + limiter, + }; + let state: BoxState = Box::new(state); + + schema_builder().extension(Tracing).data(state).finish() +} + +fn span_for_graphql_request(request: &async_graphql::Request) -> tracing::Span { + let span = info_span!( + "GraphQL operation", + 
"otel.name" = tracing::field::Empty, + "otel.kind" = "server", + { GRAPHQL_DOCUMENT } = request.query, + { GRAPHQL_OPERATION_NAME } = tracing::field::Empty, + ); + + if let Some(name) = &request.operation_name { + span.record("otel.name", name); + span.record(GRAPHQL_OPERATION_NAME, name); + } + + span +} + +#[derive(thiserror::Error, Debug)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("Loading of some database objects failed")] + LoadFailed, + + #[error("Invalid access token")] + InvalidToken, + + #[error("Missing scope")] + MissingScope, + + #[error(transparent)] + ParseRequest(#[from] async_graphql::ParseRequestError), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> Response { + let event_id = sentry::capture_error(&self); + + let response = match self { + e @ (Self::Internal(_) | Self::LoadFailed) => { + let error = async_graphql::Error::new_with_source(e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"errors": [error]})), + ) + .into_response() + } + + Self::InvalidToken => { + let error = async_graphql::Error::new("Invalid token"); + ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"errors": [error]})), + ) + .into_response() + } + + Self::MissingScope => { + let error = async_graphql::Error::new("Missing urn:mas:graphql:* scope"); + ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({"errors": [error]})), + ) + .into_response() + } + + Self::ParseRequest(e) => { + let error = async_graphql::Error::new_with_source(e); + ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"errors": [error]})), + ) + .into_response() + } + }; + + (SentryEventID::from(event_id), response).into_response() + } +} + +async fn get_requester( + undocumented_oauth2_access: bool, + clock: &impl Clock, + activity_tracker: &BoundActivityTracker, + mut repo: BoxRepository, + session_info: &SessionInfo, + user_agent: Option, + token: 
Option<&str>, +) -> Result { + let entity = if let Some(token) = token { + // If we haven't enabled undocumented_oauth2_access on the listener, we bail out + if !undocumented_oauth2_access { + return Err(RouteError::InvalidToken); + } + + let token = repo + .oauth2_access_token() + .find_by_token(token) + .await? + .ok_or(RouteError::InvalidToken)?; + + let session = repo + .oauth2_session() + .lookup(token.session_id) + .await? + .ok_or(RouteError::LoadFailed)?; + + activity_tracker + .record_oauth2_session(clock, &session) + .await; + + // Load the user if there is one + let user = if let Some(user_id) = session.user_id { + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::LoadFailed)?; + Some(user) + } else { + None + }; + + // If there is a user for this session, check that it is not locked + let user_valid = user.as_ref().is_none_or(User::is_valid); + + if !token.is_valid(clock.now()) || !session.is_valid() || !user_valid { + return Err(RouteError::InvalidToken); + } + + if !session.scope.contains("urn:mas:graphql:*") { + return Err(RouteError::MissingScope); + } + + RequestingEntity::OAuth2Session(Box::new((session, user))) + } else { + let maybe_session = session_info.load_active_session(&mut repo).await?; + + if let Some(session) = maybe_session.as_ref() { + activity_tracker + .record_browser_session(clock, session) + .await; + } + + RequestingEntity::from(maybe_session) + }; + + let requester = Requester { + entity, + ip_address: activity_tracker.ip(), + user_agent, + }; + + repo.cancel().await?; + Ok(requester) +} + +pub async fn post( + AxumState(schema): AxumState, + Extension(ExtraRouterParameters { + undocumented_oauth2_access, + }): Extension, + clock: BoxClock, + repo: BoxRepository, + activity_tracker: BoundActivityTracker, + cookie_jar: CookieJar, + content_type: Option>, + authorization: Option>>, + user_agent: Option>, + body: Body, +) -> Result { + let body = body.into_data_stream(); + let token = authorization + 
.as_ref() + .map(|TypedHeader(Authorization(bearer))| bearer.token()); + let user_agent = user_agent.map(|TypedHeader(h)| h.to_string()); + let (session_info, mut cookie_jar) = cookie_jar.session_info(); + let requester = get_requester( + undocumented_oauth2_access, + &clock, + &activity_tracker, + repo, + &session_info, + user_agent, + token, + ) + .await?; + + let content_type = content_type.map(|TypedHeader(h)| h.to_string()); + + let request = async_graphql::http::receive_body( + content_type, + body.map_err(std::io::Error::other).into_async_read(), + MultipartOptions::default(), + ) + .await? + .data(requester); // XXX: this should probably return another error response? + + let span = span_for_graphql_request(&request); + let mut response = schema.execute(request).instrument(span).await; + + if has_session_ended(&mut response) { + let session_info = session_info.mark_session_ended(); + cookie_jar = cookie_jar.update_session_info(&session_info); + } + + let cache_control = response + .cache_control + .value() + .and_then(|v| HeaderValue::from_str(&v).ok()) + .map(|h| [(CACHE_CONTROL, h)]); + + let headers = response.http_headers.clone(); + + Ok((headers, cache_control, cookie_jar, Json(response))) +} + +pub async fn get( + AxumState(schema): AxumState, + Extension(ExtraRouterParameters { + undocumented_oauth2_access, + }): Extension, + clock: BoxClock, + repo: BoxRepository, + activity_tracker: BoundActivityTracker, + cookie_jar: CookieJar, + authorization: Option>>, + user_agent: Option>, + RawQuery(query): RawQuery, +) -> Result { + let token = authorization + .as_ref() + .map(|TypedHeader(Authorization(bearer))| bearer.token()); + let user_agent = user_agent.map(|TypedHeader(h)| h.to_string()); + let (session_info, mut cookie_jar) = cookie_jar.session_info(); + let requester = get_requester( + undocumented_oauth2_access, + &clock, + &activity_tracker, + repo, + &session_info, + user_agent, + token, + ) + .await?; + + let request = + 
async_graphql::http::parse_query_string(&query.unwrap_or_default())?.data(requester); + + let span = span_for_graphql_request(&request); + let mut response = schema.execute(request).instrument(span).await; + + if has_session_ended(&mut response) { + let session_info = session_info.mark_session_ended(); + cookie_jar = cookie_jar.update_session_info(&session_info); + } + + let cache_control = response + .cache_control + .value() + .and_then(|v| HeaderValue::from_str(&v).ok()) + .map(|h| [(CACHE_CONTROL, h)]); + + let headers = response.http_headers.clone(); + + Ok((headers, cache_control, cookie_jar, Json(response))) +} + +pub async fn playground() -> impl IntoResponse { + Html(playground_source( + GraphQLPlaygroundConfig::new("/graphql").with_setting("request.credentials", "include"), + )) +} + +pub type Schema = async_graphql::Schema; +pub type SchemaBuilder = async_graphql::SchemaBuilder; + +#[must_use] +pub fn schema_builder() -> SchemaBuilder { + async_graphql::Schema::build(Query::new(), Mutation::new(), EmptySubscription) + .register_output_type::() + .register_output_type::() +} + +pub struct Requester { + entity: RequestingEntity, + ip_address: Option, + user_agent: Option, +} + +impl Requester { + pub fn fingerprint(&self) -> RequesterFingerprint { + if let Some(ip) = self.ip_address { + RequesterFingerprint::new(ip) + } else { + RequesterFingerprint::EMPTY + } + } + + pub fn for_policy(&self) -> mas_policy::Requester { + mas_policy::Requester { + ip_address: self.ip_address, + user_agent: self.user_agent.clone(), + } + } +} + +impl Deref for Requester { + type Target = RequestingEntity; + + fn deref(&self) -> &Self::Target { + &self.entity + } +} + +/// The identity of the requester. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum RequestingEntity { + /// The requester presented no authentication information. + #[default] + Anonymous, + + /// The requester is a browser session, stored in a cookie. 
+ BrowserSession(Box), + + /// The requester is a `OAuth2` session, with an access token. + OAuth2Session(Box<(Session, Option)>), +} + +trait OwnerId { + fn owner_id(&self) -> Option; +} + +impl OwnerId for User { + fn owner_id(&self) -> Option { + Some(self.id) + } +} + +impl OwnerId for BrowserSession { + fn owner_id(&self) -> Option { + Some(self.user.id) + } +} + +impl OwnerId for mas_data_model::UserEmail { + fn owner_id(&self) -> Option { + Some(self.user_id) + } +} + +impl OwnerId for Session { + fn owner_id(&self) -> Option { + self.user_id + } +} + +impl OwnerId for mas_data_model::CompatSession { + fn owner_id(&self) -> Option { + Some(self.user_id) + } +} + +impl OwnerId for mas_data_model::UpstreamOAuthLink { + fn owner_id(&self) -> Option { + self.user_id + } +} + +/// A dumb wrapper around a `Ulid` to implement `OwnerId` for it. +pub struct UserId(Ulid); + +impl OwnerId for UserId { + fn owner_id(&self) -> Option { + Some(self.0) + } +} + +impl RequestingEntity { + fn browser_session(&self) -> Option<&BrowserSession> { + match self { + Self::BrowserSession(session) => Some(session), + Self::OAuth2Session(_) | Self::Anonymous => None, + } + } + + fn user(&self) -> Option<&User> { + match self { + Self::BrowserSession(session) => Some(&session.user), + Self::OAuth2Session(tuple) => tuple.1.as_ref(), + Self::Anonymous => None, + } + } + + fn oauth2_session(&self) -> Option<&Session> { + match self { + Self::OAuth2Session(tuple) => Some(&tuple.0), + Self::BrowserSession(_) | Self::Anonymous => None, + } + } + + /// Returns true if the requester can access the resource. + fn is_owner_or_admin(&self, resource: &impl OwnerId) -> bool { + // If the requester is an admin, they can do anything. + if self.is_admin() { + return true; + } + + // Otherwise, they must be the owner of the resource. 
+ let Some(owner_id) = resource.owner_id() else { + return false; + }; + + let Some(user) = self.user() else { + return false; + }; + + user.id == owner_id + } + + fn is_admin(&self) -> bool { + match self { + Self::OAuth2Session(tuple) => { + // TODO: is this the right scope? + // This has to be in sync with the policy + tuple.0.scope.contains("urn:mas:admin") + } + Self::BrowserSession(_) | Self::Anonymous => false, + } + } + + fn is_unauthenticated(&self) -> bool { + matches!(self, Self::Anonymous) + } +} + +impl From for RequestingEntity { + fn from(session: BrowserSession) -> Self { + Self::BrowserSession(Box::new(session)) + } +} + +impl From> for RequestingEntity +where + T: Into, +{ + fn from(session: Option) -> Self { + session.map(Into::into).unwrap_or_default() + } +} + +/// A filter for dates, with a lower bound and an upper bound +#[derive(InputObject, Default, Clone, Copy)] +pub struct DateFilter { + /// The lower bound of the date range + after: Option>, + + /// The upper bound of the date range + before: Option>, +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/browser_sessions.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/browser_sessions.rs new file mode 100644 index 00000000..08ba2583 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/browser_sessions.rs @@ -0,0 +1,209 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::{ + Context, Description, ID, Object, + connection::{Connection, Edge, OpaqueCursor, query}, +}; +use chrono::{DateTime, Utc}; +use mas_data_model::Device; +use mas_storage::{ + Pagination, RepositoryAccess, app_session::AppSessionFilter, user::BrowserSessionRepository, +}; + +use super::{ + AppSession, CompatSession, Cursor, NodeCursor, NodeType, OAuth2Session, PreloadedTotalCount, + SessionState, User, UserAgent, +}; +use crate::graphql::state::ContextExt; + +/// A browser session represents a logged in user in a browser. +#[derive(Description)] +pub struct BrowserSession(pub mas_data_model::BrowserSession); + +impl From for BrowserSession { + fn from(v: mas_data_model::BrowserSession) -> Self { + Self(v) + } +} + +#[Object(use_type_description)] +impl BrowserSession { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::BrowserSession.id(self.0.id) + } + + /// The user logged in this session. + async fn user(&self) -> User { + User(self.0.user.clone()) + } + + /// The most recent authentication of this session. + async fn last_authentication( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + + let last_authentication = repo + .browser_session() + .get_last_authentication(&self.0) + .await?; + + repo.cancel().await?; + + Ok(last_authentication.map(Authentication)) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// When the session was finished. + pub async fn finished_at(&self) -> Option> { + self.0.finished_at + } + + /// The state of the session. + pub async fn state(&self) -> SessionState { + if self.0.finished_at.is_some() { + SessionState::Finished + } else { + SessionState::Active + } + } + + /// The user-agent with which the session was created. 
+ pub async fn user_agent(&self) -> Option { + self.0 + .user_agent + .clone() + .map(mas_data_model::UserAgent::parse) + .map(UserAgent::from) + } + + /// The last IP address used by the session. + pub async fn last_active_ip(&self) -> Option { + self.0.last_active_ip.map(|ip| ip.to_string()) + } + + /// The last time the session was active. + pub async fn last_active_at(&self) -> Option> { + self.0.last_active_at + } + + /// Get the list of both compat and OAuth 2.0 sessions started by this + /// browser session, chronologically sorted + #[allow(clippy::too_many_arguments)] + async fn app_sessions( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only sessions in the given state.")] + state_param: Option, + + #[graphql(name = "device", desc = "List only sessions for the given device.")] + device_param: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| { + x.extract_for_types(&[NodeType::OAuth2Session, NodeType::CompatSession]) + }) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| { + x.extract_for_types(&[NodeType::OAuth2Session, NodeType::CompatSession]) + }) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let device_param = device_param.map(Device::try_from).transpose()?; + + let filter = AppSessionFilter::new().for_browser_session(&self.0); + + let filter = match state_param { + Some(SessionState::Active) => 
filter.active_only(), + Some(SessionState::Finished) => filter.finished_only(), + None => filter, + }; + + let filter = match device_param.as_ref() { + Some(device) => filter.for_device(device), + None => filter, + }; + + let page = repo.app_session().list(filter, pagination).await?; + + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.app_session().count(filter).await?) + } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + + connection + .edges + .extend(page.edges.into_iter().map(|edge| match edge.node { + mas_storage::app_session::AppSession::Compat(session) => Edge::new( + OpaqueCursor(NodeCursor(NodeType::CompatSession, session.id)), + AppSession::CompatSession(Box::new(CompatSession::new(*session))), + ), + mas_storage::app_session::AppSession::OAuth2(session) => Edge::new( + OpaqueCursor(NodeCursor(NodeType::OAuth2Session, session.id)), + AppSession::OAuth2Session(Box::new(OAuth2Session(*session))), + ), + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } +} + +/// An authentication records when a user enter their credential in a browser +/// session. +#[derive(Description)] +pub struct Authentication(pub mas_data_model::Authentication); + +#[Object(use_type_description)] +impl Authentication { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::Authentication.id(self.0.id) + } + + /// When the object was created. 
+ pub async fn created_at(&self) -> DateTime { + self.0.created_at + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/compat_sessions.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/compat_sessions.rs new file mode 100644 index 00000000..fbbd4ab1 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/compat_sessions.rs @@ -0,0 +1,230 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, Object}; +use chrono::{DateTime, Utc}; +use mas_data_model::Device; +use mas_storage::{compat::CompatSessionRepository, user::UserRepository}; +use url::Url; + +use super::{BrowserSession, NodeType, SessionState, User, UserAgent}; +use crate::graphql::state::ContextExt; + +/// Lazy-loaded reverse reference. +/// +/// XXX: maybe we want to stick that in a utility module +#[derive(Clone, Debug, Default)] +enum ReverseReference { + Loaded(T), + #[default] + Lazy, +} + +/// A compat session represents a client session which used the legacy Matrix +/// login API. +#[derive(Description)] +pub struct CompatSession { + session: mas_data_model::CompatSession, + sso_login: ReverseReference>, +} + +impl CompatSession { + pub fn new(session: mas_data_model::CompatSession) -> Self { + Self { + session, + sso_login: ReverseReference::Lazy, + } + } + + /// Save an eagerly loaded SSO login. + pub fn with_loaded_sso_login( + mut self, + sso_login: Option, + ) -> Self { + self.sso_login = ReverseReference::Loaded(sso_login); + self + } +} + +/// The type of a compatibility session. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum CompatSessionType { + /// The session was created by a SSO login. 
+ SsoLogin, + + /// The session was created by an unknown method. + Unknown, +} + +#[Object(use_type_description)] +impl CompatSession { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::CompatSession.id(self.session.id) + } + + /// The user authorized for this session. + async fn user(&self, ctx: &Context<'_>) -> Result { + let state = ctx.state(); + let mut repo = state.repository().await?; + let user = repo + .user() + .lookup(self.session.user_id) + .await? + .context("Could not load user")?; + repo.cancel().await?; + + Ok(User(user)) + } + + /// The Matrix Device ID of this session. + async fn device_id(&self) -> Option<&str> { + self.session.device.as_ref().map(Device::as_str) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.session.created_at + } + + /// When the session ended. + pub async fn finished_at(&self) -> Option> { + self.session.finished_at() + } + + /// The user-agent with which the session was created. + pub async fn user_agent(&self) -> Option { + self.session + .user_agent + .clone() + .map(mas_data_model::UserAgent::parse) + .map(UserAgent::from) + } + + /// The associated SSO login, if any. + pub async fn sso_login( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + if let ReverseReference::Loaded(sso_login) = &self.sso_login { + return Ok(sso_login.clone().map(CompatSsoLogin)); + } + + // We need to load it on the fly + let state = ctx.state(); + let mut repo = state.repository().await?; + let sso_login = repo + .compat_sso_login() + .find_for_session(&self.session) + .await + .context("Could not load SSO login")?; + repo.cancel().await?; + + Ok(sso_login.map(CompatSsoLogin)) + } + + /// The browser session which started this session, if any. 
+ pub async fn browser_session( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + let Some(user_session_id) = self.session.user_session_id else { + return Ok(None); + }; + + let state = ctx.state(); + let mut repo = state.repository().await?; + let browser_session = repo + .browser_session() + .lookup(user_session_id) + .await? + .context("Could not load browser session")?; + repo.cancel().await?; + + Ok(Some(BrowserSession(browser_session))) + } + + /// The state of the session. + pub async fn state(&self) -> SessionState { + match &self.session.state { + mas_data_model::CompatSessionState::Valid => SessionState::Active, + mas_data_model::CompatSessionState::Finished { .. } => SessionState::Finished, + } + } + + /// The last IP address used by the session. + pub async fn last_active_ip(&self) -> Option { + self.session.last_active_ip.map(|ip| ip.to_string()) + } + + /// The last time the session was active. + pub async fn last_active_at(&self) -> Option> { + self.session.last_active_at + } + + /// A human-provided name for the session. + pub async fn human_name(&self) -> Option<&str> { + self.session.human_name.as_deref() + } +} + +/// A compat SSO login represents a login done through the legacy Matrix login +/// API, via the `m.login.sso` login method. +#[derive(Description)] +pub struct CompatSsoLogin(pub mas_data_model::CompatSsoLogin); + +#[Object(use_type_description)] +impl CompatSsoLogin { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::CompatSsoLogin.id(self.0.id) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// The redirect URI used during the login. + async fn redirect_uri(&self) -> &Url { + &self.0.redirect_uri + } + + /// When the login was fulfilled, and the user was redirected back to the + /// client. 
+ async fn fulfilled_at(&self) -> Option> { + self.0.fulfilled_at() + } + + /// When the client exchanged the login token sent during the redirection. + async fn exchanged_at(&self) -> Option> { + self.0.exchanged_at() + } + + /// The compat session which was started by this login. + async fn session( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + let Some(session_id) = self.0.session_id() else { + return Ok(None); + }; + + let state = ctx.state(); + let mut repo = state.repository().await?; + let session = repo + .compat_session() + .lookup(session_id) + .await? + .context("Could not load compat session")?; + repo.cancel().await?; + + Ok(Some( + CompatSession::new(session).with_loaded_sso_login(Some(self.0.clone())), + )) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/cursor.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/cursor.rs new file mode 100644 index 00000000..b7b498d0 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/cursor.rs @@ -0,0 +1,34 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::connection::OpaqueCursor; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +pub use super::NodeType; + +#[derive(Serialize, Deserialize, PartialEq, Eq)] +pub struct NodeCursor(pub NodeType, pub Ulid); + +impl NodeCursor { + pub fn extract_for_types(&self, node_types: &[NodeType]) -> Result { + if node_types.contains(&self.0) { + Ok(self.1) + } else { + Err(async_graphql::Error::new("invalid cursor")) + } + } + + pub fn extract_for_type(&self, node_type: NodeType) -> Result { + if self.0 == node_type { + Ok(self.1) + } else { + Err(async_graphql::Error::new("invalid cursor")) + } + } +} + +pub type Cursor = OpaqueCursor; diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/matrix.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/matrix.rs new file mode 100644 index 00000000..7316c0d6 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/matrix.rs @@ -0,0 +1,41 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::SimpleObject; +use mas_matrix::HomeserverConnection; + +#[derive(SimpleObject)] +pub struct MatrixUser { + /// The Matrix ID of the user. + mxid: String, + + /// The display name of the user, if any. + display_name: Option, + + /// The avatar URL of the user, if any. + avatar_url: Option, + + /// Whether the user is deactivated on the homeserver. 
+ deactivated: bool, +} + +impl MatrixUser { + pub(crate) async fn load( + conn: &C, + user: &str, + ) -> Result { + let info = conn.query_user(user).await?; + + let mxid = conn.mxid(user); + + Ok(MatrixUser { + mxid, + display_name: info.displayname, + avatar_url: info.avatar_url, + deactivated: info.deactivated, + }) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/mod.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/mod.rs new file mode 100644 index 00000000..063a63fb --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/mod.rs @@ -0,0 +1,137 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::{Enum, Interface, Object, SimpleObject}; +use chrono::{DateTime, Utc}; + +mod browser_sessions; +mod compat_sessions; +mod cursor; +mod matrix; +mod node; +mod oauth; +mod site_config; +mod upstream_oauth; +mod users; +mod viewer; + +pub use self::{ + browser_sessions::{Authentication, BrowserSession}, + compat_sessions::{CompatSession, CompatSsoLogin}, + cursor::{Cursor, NodeCursor}, + node::{Node, NodeType}, + oauth::{OAuth2Client, OAuth2Session}, + site_config::{SITE_CONFIG_ID, SiteConfig}, + upstream_oauth::{UpstreamOAuth2Link, UpstreamOAuth2Provider}, + users::{AppSession, User, UserEmail, UserEmailAuthentication, UserRecoveryTicket}, + viewer::{Anonymous, Viewer, ViewerSession}, +}; + +/// An object with a creation date. 
+#[derive(Interface)]
+#[graphql(field(
+    name = "created_at",
+    desc = "When the object was created.",
+    ty = "DateTime<Utc>"
+))]
+pub enum CreationEvent {
+    Authentication(Box<Authentication>),
+    CompatSession(Box<CompatSession>),
+    BrowserSession(Box<BrowserSession>),
+    UserEmail(Box<UserEmail>),
+    UserEmailAuthentication(Box<UserEmailAuthentication>),
+    UserRecoveryTicket(Box<UserRecoveryTicket>),
+    UpstreamOAuth2Provider(Box<UpstreamOAuth2Provider>),
+    UpstreamOAuth2Link(Box<UpstreamOAuth2Link>),
+    OAuth2Session(Box<OAuth2Session>),
+}
+
+pub struct PreloadedTotalCount(pub Option<usize>);
+
+#[Object]
+impl PreloadedTotalCount {
+    /// Identifies the total count of items in the connection.
+    async fn total_count(&self) -> Result<usize, async_graphql::Error> {
+        self.0
+            .ok_or_else(|| async_graphql::Error::new("total count not preloaded"))
+    }
+}
+
+/// The state of a session
+#[derive(Enum, Copy, Clone, Eq, PartialEq)]
+pub enum SessionState {
+    /// The session is active.
+    Active,
+
+    /// The session is no longer active.
+    Finished,
+}
+
+/// The type of a user agent
+#[derive(Enum, Copy, Clone, Eq, PartialEq)]
+pub enum DeviceType {
+    /// A personal computer, laptop or desktop
+    Pc,
+
+    /// A mobile phone. Can also sometimes be a tablet.
+    Mobile,
+
+    /// A tablet
+    Tablet,
+
+    /// Unknown device type
+    Unknown,
+}
+
+impl From<mas_data_model::DeviceType> for DeviceType {
+    fn from(device_type: mas_data_model::DeviceType) -> Self {
+        match device_type {
+            mas_data_model::DeviceType::Pc => Self::Pc,
+            mas_data_model::DeviceType::Mobile => Self::Mobile,
+            mas_data_model::DeviceType::Tablet => Self::Tablet,
+            mas_data_model::DeviceType::Unknown => Self::Unknown,
+        }
+    }
+}
+
+/// A parsed user agent string
+#[derive(SimpleObject)]
+pub struct UserAgent {
+    /// The user agent string
+    pub raw: String,
+
+    /// The name of the browser
+    pub name: Option<String>,
+
+    /// The version of the browser
+    pub version: Option<String>,
+
+    /// The operating system name
+    pub os: Option<String>,
+
+    /// The operating system version
+    pub os_version: Option<String>,
+
+    /// The device model
+    pub model: Option<String>,
+
+    /// The device type
+    pub device_type: DeviceType,
+}
+
+impl From<mas_data_model::UserAgent> for UserAgent {
+    fn from(ua: mas_data_model::UserAgent) -> Self {
+        Self {
+            raw: ua.raw,
+            name: ua.name,
+            version: ua.version,
+            os: ua.os,
+            os_version: ua.os_version,
+            model: ua.model,
+            device_type: ua.device_type.into(),
+        }
+    }
+}
diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/node.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/node.rs
new file mode 100644
index 00000000..e63d2b38
--- /dev/null
+++ b/matrix-authentication-service/crates/handlers/src/graphql/model/node.rs
@@ -0,0 +1,132 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use async_graphql::{ID, Interface};
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+use ulid::Ulid;
+
+use super::{
+    Anonymous, Authentication, BrowserSession, CompatSession, CompatSsoLogin, OAuth2Client,
+    OAuth2Session, SiteConfig, UpstreamOAuth2Link, UpstreamOAuth2Provider, User, UserEmail,
+    UserEmailAuthentication, UserRecoveryTicket,
+};
+
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum NodeType {
+    Authentication,
+    BrowserSession,
+    CompatSession,
+    CompatSsoLogin,
+    OAuth2Client,
+    OAuth2Session,
+    UpstreamOAuth2Provider,
+    UpstreamOAuth2Link,
+    User,
+    UserEmail,
+    UserEmailAuthentication,
+    UserRecoveryTicket,
+}
+
+#[derive(Debug, Error)]
+#[error("invalid id")]
+pub enum InvalidID {
+    InvalidFormat,
+    InvalidUlid(#[from] ulid::DecodeError),
+    UnknownPrefix,
+    TypeMismatch { got: NodeType, expected: NodeType },
+}
+
+impl NodeType {
+    fn to_prefix(self) -> &'static str {
+        match self {
+            NodeType::Authentication => "authentication",
+            NodeType::BrowserSession => "browser_session",
+            NodeType::CompatSession => "compat_session",
+            NodeType::CompatSsoLogin => "compat_sso_login",
+            NodeType::OAuth2Client => "oauth2_client",
+            NodeType::OAuth2Session => "oauth2_session",
+            NodeType::UpstreamOAuth2Provider => "upstream_oauth2_provider",
+            NodeType::UpstreamOAuth2Link => "upstream_oauth2_link",
+            NodeType::User => "user",
+            NodeType::UserEmail => "user_email",
+            NodeType::UserEmailAuthentication => "user_email_authentication",
+            NodeType::UserRecoveryTicket => "user_recovery_ticket",
+        }
+    }
+
+    fn from_prefix(prefix: &str) -> Option<Self> {
+        match prefix {
+            "authentication" => Some(NodeType::Authentication),
+            "browser_session" => Some(NodeType::BrowserSession),
+            "compat_session" => Some(NodeType::CompatSession),
+            "compat_sso_login" => Some(NodeType::CompatSsoLogin),
+            "oauth2_client" => Some(NodeType::OAuth2Client),
+            "oauth2_session" => Some(NodeType::OAuth2Session),
+            "upstream_oauth2_provider" => Some(NodeType::UpstreamOAuth2Provider),
+            "upstream_oauth2_link" => Some(NodeType::UpstreamOAuth2Link),
+            "user" => Some(NodeType::User),
+            "user_email" => Some(NodeType::UserEmail),
+            "user_email_authentication" => Some(NodeType::UserEmailAuthentication),
+            "user_recovery_ticket" => Some(NodeType::UserRecoveryTicket),
+            _ => None,
+        }
+    }
+
+    pub fn serialize(self, id: impl Into<Ulid>) -> String {
+        let prefix = self.to_prefix();
+        let id = id.into();
+        format!("{prefix}:{id}")
+    }
+
+    pub fn id(self, id: impl Into<Ulid>) -> ID {
+        ID(self.serialize(id))
+    }
+
+    pub fn deserialize(serialized: &str) -> Result<(Self, Ulid), InvalidID> {
+        let (prefix, id) = serialized.split_once(':').ok_or(InvalidID::InvalidFormat)?;
+        let prefix = NodeType::from_prefix(prefix).ok_or(InvalidID::UnknownPrefix)?;
+        let id = id.parse()?;
+        Ok((prefix, id))
+    }
+
+    pub fn from_id(id: &ID) -> Result<(Self, Ulid), InvalidID> {
+        Self::deserialize(&id.0)
+    }
+
+    pub fn extract_ulid(self, id: &ID) -> Result<Ulid, InvalidID> {
+        let (node_type, ulid) = Self::deserialize(&id.0)?;
+
+        if node_type == self {
+            Ok(ulid)
+        } else {
+            Err(InvalidID::TypeMismatch {
+                got: node_type,
+                expected: self,
+            })
+        }
+    }
+}
+
+/// An object with an ID.
+#[derive(Interface)]
+#[graphql(field(name = "id", desc = "ID of the object.", ty = "ID"))]
+pub enum Node {
+    Anonymous(Box<Anonymous>),
+    Authentication(Box<Authentication>),
+    BrowserSession(Box<BrowserSession>),
+    CompatSession(Box<CompatSession>),
+    CompatSsoLogin(Box<CompatSsoLogin>),
+    OAuth2Client(Box<OAuth2Client>),
+    OAuth2Session(Box<OAuth2Session>),
+    SiteConfig(Box<SiteConfig>),
+    UpstreamOAuth2Provider(Box<UpstreamOAuth2Provider>),
+    UpstreamOAuth2Link(Box<UpstreamOAuth2Link>),
+    User(Box<User>),
+    UserEmail(Box<UserEmail>),
+    UserEmailAuthentication(Box<UserEmailAuthentication>),
+    UserRecoveryTicket(Box<UserRecoveryTicket>),
+}
diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/oauth.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/oauth.rs
new file mode 100644
index 00000000..20a4d527
--- /dev/null
+++ b/matrix-authentication-service/crates/handlers/src/graphql/model/oauth.rs
@@ -0,0 +1,201 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, Object}; +use chrono::{DateTime, Utc}; +use mas_storage::{oauth2::OAuth2ClientRepository, user::BrowserSessionRepository}; +use oauth2_types::oidc::ApplicationType; +use url::Url; + +use super::{BrowserSession, NodeType, SessionState, User, UserAgent}; +use crate::graphql::{UserId, state::ContextExt}; + +/// An OAuth 2.0 session represents a client session which used the OAuth APIs +/// to login. +#[derive(Description)] +pub struct OAuth2Session(pub mas_data_model::Session); + +#[Object(use_type_description)] +impl OAuth2Session { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::OAuth2Session.id(self.0.id) + } + + /// OAuth 2.0 client used by this session. + pub async fn client(&self, ctx: &Context<'_>) -> Result { + let state = ctx.state(); + let mut repo = state.repository().await?; + let client = repo + .oauth2_client() + .lookup(self.0.client_id) + .await? + .context("Could not load client")?; + repo.cancel().await?; + + Ok(OAuth2Client(client)) + } + + /// Scope granted for this session. + pub async fn scope(&self) -> String { + self.0.scope.to_string() + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// When the session ended. + pub async fn finished_at(&self) -> Option> { + match &self.0.state { + mas_data_model::SessionState::Valid => None, + mas_data_model::SessionState::Finished { finished_at } => Some(*finished_at), + } + } + + /// The user-agent with which the session was created. + pub async fn user_agent(&self) -> Option { + self.0 + .user_agent + .clone() + .map(mas_data_model::UserAgent::parse) + .map(UserAgent::from) + } + + /// The state of the session. 
+ pub async fn state(&self) -> SessionState { + match &self.0.state { + mas_data_model::SessionState::Valid => SessionState::Active, + mas_data_model::SessionState::Finished { .. } => SessionState::Finished, + } + } + + /// The browser session which started this OAuth 2.0 session. + pub async fn browser_session( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + let Some(user_session_id) = self.0.user_session_id else { + return Ok(None); + }; + + let state = ctx.state(); + let mut repo = state.repository().await?; + let browser_session = repo + .browser_session() + .lookup(user_session_id) + .await? + .context("Could not load browser session")?; + repo.cancel().await?; + + Ok(Some(BrowserSession(browser_session))) + } + + /// User authorized for this session. + pub async fn user(&self, ctx: &Context<'_>) -> Result, async_graphql::Error> { + let state = ctx.state(); + let Some(user_id) = self.0.user_id else { + return Ok(None); + }; + + if !ctx.requester().is_owner_or_admin(&UserId(user_id)) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + let user = repo + .user() + .lookup(user_id) + .await? + .context("Could not load user")?; + repo.cancel().await?; + + Ok(Some(User(user))) + } + + /// The last IP address used by the session. + pub async fn last_active_ip(&self) -> Option { + self.0.last_active_ip.map(|ip| ip.to_string()) + } + + /// The last time the session was active. + pub async fn last_active_at(&self) -> Option> { + self.0.last_active_at + } + + /// The user-provided name for this session. + pub async fn human_name(&self) -> Option<&str> { + self.0.human_name.as_deref() + } +} + +/// The application type advertised by the client. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum OAuth2ApplicationType { + /// Client is a web application. + Web, + + /// Client is a native application. 
+ Native, +} + +/// An OAuth 2.0 client +#[derive(Description)] +pub struct OAuth2Client(pub mas_data_model::Client); + +#[Object(use_type_description)] +impl OAuth2Client { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::OAuth2Client.id(self.0.id) + } + + /// OAuth 2.0 client ID + pub async fn client_id(&self) -> &str { + &self.0.client_id + } + + /// Client name advertised by the client. + pub async fn client_name(&self) -> Option<&str> { + self.0.client_name.as_deref() + } + + /// Client URI advertised by the client. + pub async fn client_uri(&self) -> Option<&Url> { + self.0.client_uri.as_ref() + } + + /// Logo URI advertised by the client. + pub async fn logo_uri(&self) -> Option<&Url> { + self.0.logo_uri.as_ref() + } + + /// Terms of services URI advertised by the client. + pub async fn tos_uri(&self) -> Option<&Url> { + self.0.tos_uri.as_ref() + } + + /// Privacy policy URI advertised by the client. + pub async fn policy_uri(&self) -> Option<&Url> { + self.0.policy_uri.as_ref() + } + + /// List of redirect URIs used for authorization grants by the client. + pub async fn redirect_uris(&self) -> &[Url] { + &self.0.redirect_uris + } + + /// The application type advertised by the client. + pub async fn application_type(&self) -> Option { + match self.0.application_type.as_ref()? { + ApplicationType::Web => Some(OAuth2ApplicationType::Web), + ApplicationType::Native => Some(OAuth2ApplicationType::Native), + ApplicationType::Unknown(_) => None, + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/site_config.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/site_config.rs new file mode 100644 index 00000000..d6966907 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/site_config.rs @@ -0,0 +1,135 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::str_to_string)] // ComplexObject macro uses &str.to_string() + +use async_graphql::{ComplexObject, Enum, ID, SimpleObject}; +use url::Url; + +pub const SITE_CONFIG_ID: &str = "site_config"; +pub const CAPTCHA_CONFIG_ID: &str = "captcha_config"; + +#[derive(SimpleObject)] +#[graphql(complex)] +#[allow(clippy::struct_excessive_bools)] +pub struct SiteConfig { + /// The configuration of CAPTCHA provider. + captcha_config: Option, + + /// The server name of the homeserver. + server_name: String, + + /// The URL to the privacy policy. + policy_uri: Option, + + /// The URL to the terms of service. + tos_uri: Option, + + /// Imprint to show in the footer. + imprint: Option, + + /// Whether users can change their email. + email_change_allowed: bool, + + /// Whether users can change their display name. + display_name_change_allowed: bool, + + /// Whether passwords are enabled for login. + password_login_enabled: bool, + + /// Whether passwords are enabled and users can change their own passwords. + password_change_allowed: bool, + + /// Whether passwords are enabled and users can register using a password. + password_registration_enabled: bool, + + /// Whether users can delete their own account. + account_deactivation_allowed: bool, + + /// Minimum password complexity, from 0 to 4, in terms of a zxcvbn score. + /// The exact scorer (including dictionaries and other data tables) + /// in use is . + minimum_password_complexity: u8, + + /// Whether users can log in with their email address. + login_with_email_allowed: bool, + + /// Experimental plan management iframe URI. 
+ plan_management_iframe_uri: Option, +} + +#[derive(SimpleObject)] +#[graphql(complex)] +pub struct CaptchaConfig { + /// Which Captcha service is being used + pub service: CaptchaService, + + /// The site key used by the instance + pub site_key: String, +} + +/// Which Captcha service is being used +#[derive(Enum, Debug, Clone, Copy, PartialEq, Eq)] +pub enum CaptchaService { + RecaptchaV2, + CloudflareTurnstile, + HCaptcha, +} + +#[ComplexObject] +impl SiteConfig { + /// The ID of the site configuration. + pub async fn id(&self) -> ID { + SITE_CONFIG_ID.into() + } +} + +impl SiteConfig { + /// Create a new [`SiteConfig`] from the data model + /// [`mas_data_model:::SiteConfig`]. + pub fn new(data_model: &mas_data_model::SiteConfig) -> Self { + Self { + captcha_config: data_model.captcha.as_ref().map(CaptchaConfig::new), + server_name: data_model.server_name.clone(), + policy_uri: data_model.policy_uri.clone(), + tos_uri: data_model.tos_uri.clone(), + imprint: data_model.imprint.clone(), + email_change_allowed: data_model.email_change_allowed, + display_name_change_allowed: data_model.displayname_change_allowed, + password_login_enabled: data_model.password_login_enabled, + password_change_allowed: data_model.password_change_allowed, + password_registration_enabled: data_model.password_registration_enabled, + account_deactivation_allowed: data_model.account_deactivation_allowed, + minimum_password_complexity: data_model.minimum_password_complexity, + login_with_email_allowed: data_model.login_with_email_allowed, + plan_management_iframe_uri: data_model.plan_management_iframe_uri.clone(), + } + } +} + +#[ComplexObject] +impl CaptchaConfig { + pub async fn id(&self) -> ID { + CAPTCHA_CONFIG_ID.into() + } +} + +impl CaptchaConfig { + /// Create a new [`CaptchaConfig`] from the data model + /// [`mas_data_model:::CaptchaConfig`]. 
+ pub fn new(data_model: &mas_data_model::CaptchaConfig) -> Self { + Self { + service: match data_model.service { + mas_data_model::CaptchaService::RecaptchaV2 => CaptchaService::RecaptchaV2, + mas_data_model::CaptchaService::CloudflareTurnstile => { + CaptchaService::CloudflareTurnstile + } + mas_data_model::CaptchaService::HCaptcha => CaptchaService::HCaptcha, + }, + site_key: data_model.site_key.clone(), + } + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/upstream_oauth.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/upstream_oauth.rs new file mode 100644 index 00000000..faa30a75 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/upstream_oauth.rs @@ -0,0 +1,161 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, ID, Object}; +use chrono::{DateTime, Utc}; +use mas_storage::{upstream_oauth2::UpstreamOAuthProviderRepository, user::UserRepository}; +use url::Url; + +use super::{NodeType, User}; +use crate::graphql::state::ContextExt; + +#[derive(Debug, Clone)] +pub struct UpstreamOAuth2Provider { + provider: mas_data_model::UpstreamOAuthProvider, +} + +impl UpstreamOAuth2Provider { + #[must_use] + pub const fn new(provider: mas_data_model::UpstreamOAuthProvider) -> Self { + Self { provider } + } +} + +#[Object] +impl UpstreamOAuth2Provider { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::UpstreamOAuth2Provider.id(self.provider.id) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.provider.created_at + } + + /// OpenID Connect issuer URL. 
+ pub async fn issuer(&self) -> Option<&str> { + self.provider.issuer.as_deref() + } + + /// Client ID used for this provider. + pub async fn client_id(&self) -> &str { + &self.provider.client_id + } + + /// A human-readable name for this provider. + pub async fn human_name(&self) -> Option<&str> { + self.provider.human_name.as_deref() + } + + /// A brand identifier for this provider. + /// + /// One of `google`, `github`, `gitlab`, `apple` or `facebook`. + pub async fn brand_name(&self) -> Option<&str> { + self.provider.brand_name.as_deref() + } + + /// URL to start the linking process of the current user with this provider. + pub async fn link_url(&self, context: &Context<'_>) -> Url { + let state = context.state(); + let url_builder = state.url_builder(); + let route = mas_router::UpstreamOAuth2Authorize::new(self.provider.id); + url_builder.absolute_url_for(&route) + } +} + +impl UpstreamOAuth2Link { + #[must_use] + pub const fn new(link: mas_data_model::UpstreamOAuthLink) -> Self { + Self { + link, + provider: None, + user: None, + } + } +} + +#[derive(Debug, Clone)] +pub struct UpstreamOAuth2Link { + link: mas_data_model::UpstreamOAuthLink, + provider: Option, + user: Option, +} + +#[Object] +impl UpstreamOAuth2Link { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::UpstreamOAuth2Link.id(self.link.id) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.link.created_at + } + + /// Subject used for linking + pub async fn subject(&self) -> &str { + &self.link.subject + } + + /// A human-readable name for the link subject. + pub async fn human_account_name(&self) -> Option<&str> { + self.link.human_account_name.as_deref() + } + + /// The provider for which this link is. 
+ pub async fn provider( + &self, + ctx: &Context<'_>, + ) -> Result { + let state = ctx.state(); + let provider = if let Some(provider) = &self.provider { + // Cached + provider.clone() + } else { + // Fetch on-the-fly + let mut repo = state.repository().await?; + + let provider = repo + .upstream_oauth_provider() + .lookup(self.link.provider_id) + .await? + .context("Upstream OAuth 2.0 provider not found")?; + repo.cancel().await?; + + provider + }; + + Ok(UpstreamOAuth2Provider::new(provider)) + } + + /// The user to which this link is associated. + pub async fn user(&self, ctx: &Context<'_>) -> Result, async_graphql::Error> { + let state = ctx.state(); + let user = if let Some(user) = &self.user { + // Cached + user.clone() + } else if let Some(user_id) = &self.link.user_id { + // Fetch on-the-fly + let mut repo = state.repository().await?; + + let user = repo + .user() + .lookup(*user_id) + .await? + .context("User not found")?; + repo.cancel().await?; + + user + } else { + return Ok(None); + }; + + Ok(Some(User(user))) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/users.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/users.rs new file mode 100644 index 00000000..7e615df7 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/users.rs @@ -0,0 +1,888 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use anyhow::Context as _; +use async_graphql::{ + Context, Description, Enum, ID, Object, Union, + connection::{Connection, Edge, OpaqueCursor, query}, +}; +use chrono::{DateTime, Utc}; +use mas_data_model::Device; +use mas_storage::{ + Pagination, RepositoryAccess, + app_session::AppSessionFilter, + compat::{CompatSessionFilter, CompatSsoLoginFilter, CompatSsoLoginRepository}, + oauth2::{OAuth2SessionFilter, OAuth2SessionRepository}, + upstream_oauth2::{UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository}, + user::{BrowserSessionFilter, BrowserSessionRepository, UserEmailFilter, UserEmailRepository}, +}; + +use super::{ + BrowserSession, CompatSession, Cursor, NodeCursor, NodeType, OAuth2Session, + PreloadedTotalCount, SessionState, UpstreamOAuth2Link, + compat_sessions::{CompatSessionType, CompatSsoLogin}, + matrix::MatrixUser, +}; +use crate::graphql::{DateFilter, state::ContextExt}; + +#[derive(Description)] +/// A user is an individual's account. +pub struct User(pub mas_data_model::User); + +impl From for User { + fn from(v: mas_data_model::User) -> Self { + Self(v) + } +} + +impl From for User { + fn from(v: mas_data_model::BrowserSession) -> Self { + Self(v.user) + } +} + +#[Object(use_type_description)] +impl User { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::User.id(self.0.id) + } + + /// Username chosen by the user. + async fn username(&self) -> &str { + &self.0.username + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// When the user was locked out. + pub async fn locked_at(&self) -> Option> { + self.0.locked_at + } + + /// Whether the user can request admin privileges. + pub async fn can_request_admin(&self) -> bool { + self.0.can_request_admin + } + + /// Access to the user's Matrix account information. 
+ async fn matrix(&self, ctx: &Context<'_>) -> Result { + let state = ctx.state(); + let conn = state.homeserver_connection(); + Ok(MatrixUser::load(conn, &self.0.username).await?) + } + + /// Get the list of compatibility SSO logins, chronologically sorted + async fn compat_sso_logins( + &self, + ctx: &Context<'_>, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::CompatSsoLogin)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::CompatSsoLogin)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let filter = CompatSsoLoginFilter::new().for_user(&self.0); + + let page = repo.compat_sso_login().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.compat_sso_login().count(filter).await?) 
+ } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::CompatSsoLogin, edge.cursor)), + CompatSsoLogin(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of compatibility sessions, chronologically sorted + #[allow(clippy::too_many_arguments)] + async fn compat_sessions( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only sessions with the given state.")] + state_param: Option, + + #[graphql(name = "type", desc = "List only sessions with the given type.")] + type_param: Option, + + #[graphql( + name = "lastActive", + desc = "List only sessions with a last active time is between the given bounds." + )] + last_active: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + let last_active = last_active.unwrap_or_default(); + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::CompatSession)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::CompatSession)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + // Build the query filter + let filter = CompatSessionFilter::new().for_user(&self.0); + let filter = match 
state_param { + Some(SessionState::Active) => filter.active_only(), + Some(SessionState::Finished) => filter.finished_only(), + None => filter, + }; + let filter = match type_param { + Some(CompatSessionType::SsoLogin) => filter.sso_login_only(), + Some(CompatSessionType::Unknown) => filter.unknown_only(), + None => filter, + }; + + let filter = match last_active.after { + Some(after) => filter.with_last_active_after(after), + None => filter, + }; + let filter = match last_active.before { + Some(before) => filter.with_last_active_before(before), + None => filter, + }; + + let page = repo.compat_session().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.compat_session().count(filter).await?) + } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + let (session, sso_login) = edge.node; + Edge::new( + OpaqueCursor(NodeCursor(NodeType::CompatSession, session.id)), + CompatSession::new(session).with_loaded_sso_login(sso_login), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of active browser sessions, chronologically sorted + async fn browser_sessions( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only sessions in the given state.")] + state_param: Option, + + #[graphql( + name = "lastActive", + desc = "List only sessions with a last active time is between the given bounds." 
+ )] + last_active: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + let last_active = last_active.unwrap_or_default(); + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::BrowserSession)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::BrowserSession)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let filter = BrowserSessionFilter::new().for_user(&self.0); + let filter = match state_param { + Some(SessionState::Active) => filter.active_only(), + Some(SessionState::Finished) => filter.finished_only(), + None => filter, + }; + + let filter = match last_active.after { + Some(after) => filter.with_last_active_after(after), + None => filter, + }; + let filter = match last_active.before { + Some(before) => filter.with_last_active_before(before), + None => filter, + }; + + let page = repo.browser_session().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.browser_session().count(filter).await?) 
+ } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::BrowserSession, edge.cursor)), + BrowserSession(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of emails, chronologically sorted + async fn emails( + &self, + ctx: &Context<'_>, + + #[graphql( + deprecation = "Emails are always confirmed, and have only one state", + name = "state", + desc = "List only emails in the given state." + )] + state_param: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + let _ = state_param; + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::UserEmail)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::UserEmail)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let filter = UserEmailFilter::new().for_user(&self.0); + + let page = repo.user_email().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.user_email().count(filter).await?) 
+ } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::UserEmail, edge.cursor)), + UserEmail(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of OAuth 2.0 sessions, chronologically sorted + #[allow(clippy::too_many_arguments)] + async fn oauth2_sessions( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only sessions in the given state.")] + state_param: Option, + + #[graphql(desc = "List only sessions for the given client.")] client: Option, + + #[graphql( + name = "lastActive", + desc = "List only sessions with a last active time is between the given bounds." + )] + last_active: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + let last_active = last_active.unwrap_or_default(); + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::OAuth2Session)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::OAuth2Session)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let client = if let Some(id) = client { + // Load the client if we're filtering by it + let id = NodeType::OAuth2Client.extract_ulid(&id)?; + let 
client = repo + .oauth2_client() + .lookup(id) + .await? + .ok_or(async_graphql::Error::new("Unknown client ID"))?; + + Some(client) + } else { + None + }; + + let filter = OAuth2SessionFilter::new().for_user(&self.0); + + let filter = match state_param { + Some(SessionState::Active) => filter.active_only(), + Some(SessionState::Finished) => filter.finished_only(), + None => filter, + }; + + let filter = match client.as_ref() { + Some(client) => filter.for_client(client), + None => filter, + }; + + let filter = match last_active.after { + Some(after) => filter.with_last_active_after(after), + None => filter, + }; + let filter = match last_active.before { + Some(before) => filter.with_last_active_before(before), + None => filter, + }; + + let page = repo.oauth2_session().list(filter, pagination).await?; + + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.oauth2_session().count(filter).await?) + } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::OAuth2Session, edge.cursor)), + OAuth2Session(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of upstream OAuth 2.0 links + async fn upstream_oauth2_links( + &self, + ctx: &Context<'_>, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> + { + let state = ctx.state(); + let mut repo = state.repository().await?; + + query( + after, + 
before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| { + x.extract_for_type(NodeType::UpstreamOAuth2Link) + }) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| { + x.extract_for_type(NodeType::UpstreamOAuth2Link) + }) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let filter = UpstreamOAuthLinkFilter::new() + .for_user(&self.0) + .enabled_providers_only(); + + let page = repo.upstream_oauth_link().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.upstream_oauth_link().count(filter).await?) + } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::UpstreamOAuth2Link, edge.cursor)), + UpstreamOAuth2Link::new(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Get the list of both compat and OAuth 2.0 sessions, chronologically + /// sorted + #[allow(clippy::too_many_arguments)] + async fn app_sessions( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only sessions in the given state.")] + state_param: Option, + + #[graphql(name = "device", desc = "List only sessions for the given device.")] + device_param: Option, + + #[graphql( + name = "lastActive", + desc = "List only sessions with a last active time is between the given bounds." + )] + last_active: Option, + + #[graphql( + name = "browserSession", + desc = "List only sessions for the given session." 
+ )] + browser_session_param: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let requester = ctx.requester(); + let mut repo = state.repository().await?; + let last_active = last_active.unwrap_or_default(); + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| { + x.extract_for_types(&[NodeType::OAuth2Session, NodeType::CompatSession]) + }) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| { + x.extract_for_types(&[NodeType::OAuth2Session, NodeType::CompatSession]) + }) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + let device_param = device_param.map(Device::try_from).transpose()?; + + let filter = AppSessionFilter::new().for_user(&self.0); + + let filter = match state_param { + Some(SessionState::Active) => filter.active_only(), + Some(SessionState::Finished) => filter.finished_only(), + None => filter, + }; + + let filter = match device_param.as_ref() { + Some(device) => filter.for_device(device), + None => filter, + }; + + let maybe_session = match browser_session_param { + Some(id) => { + // This might fail, but we're probably alright with it + let id = NodeType::BrowserSession + .extract_ulid(&id) + .context("Invalid browser_session parameter")?; + + let Some(session) = repo + .browser_session() + .lookup(id) + .await? 
+ .filter(|u| requester.is_owner_or_admin(u)) + else { + // If we couldn't find the session or if the requester can't access it, + // return an empty list + return Ok(Connection::with_additional_fields( + false, + false, + PreloadedTotalCount(Some(0)), + )); + }; + + Some(session) + } + None => None, + }; + + let filter = match maybe_session { + Some(ref session) => filter.for_browser_session(session), + None => filter, + }; + + let filter = match last_active.after { + Some(after) => filter.with_last_active_after(after), + None => filter, + }; + let filter = match last_active.before { + Some(before) => filter.with_last_active_before(before), + None => filter, + }; + + let page = repo.app_session().list(filter, pagination).await?; + + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.app_session().count(filter).await?) + } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + + connection + .edges + .extend(page.edges.into_iter().map(|edge| match edge.node { + mas_storage::app_session::AppSession::Compat(session) => Edge::new( + OpaqueCursor(NodeCursor(NodeType::CompatSession, edge.cursor)), + AppSession::CompatSession(Box::new(CompatSession::new(*session))), + ), + mas_storage::app_session::AppSession::OAuth2(session) => Edge::new( + OpaqueCursor(NodeCursor(NodeType::OAuth2Session, edge.cursor)), + AppSession::OAuth2Session(Box::new(OAuth2Session(*session))), + ), + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } + + /// Check if the user has a password set. 
+ async fn has_password(&self, ctx: &Context<'_>) -> Result { + let state = ctx.state(); + let mut repo = state.repository().await?; + + let password = repo.user_password().active(&self.0).await?; + + Ok(password.is_some()) + } +} + +/// A session in an application, either a compatibility or an OAuth 2.0 one +#[derive(Union)] +pub enum AppSession { + CompatSession(Box), + OAuth2Session(Box), +} + +/// A user email address +#[derive(Description)] +pub struct UserEmail(pub mas_data_model::UserEmail); + +#[Object(use_type_description)] +impl UserEmail { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::UserEmail.id(self.0.id) + } + + /// Email address + async fn email(&self) -> &str { + &self.0.email + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// When the email address was confirmed. Is `null` if the email was never + /// verified by the user. + #[graphql(deprecation = "Emails are always confirmed now.")] + async fn confirmed_at(&self) -> Option> { + Some(self.0.created_at) + } +} + +/// The state of a compatibility session. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum UserEmailState { + /// The email address is pending confirmation. + Pending, + + /// The email address has been confirmed. + Confirmed, +} + +/// A recovery ticket +#[derive(Description)] +pub struct UserRecoveryTicket(pub mas_data_model::UserRecoveryTicket); + +/// The status of a recovery ticket +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum UserRecoveryTicketStatus { + /// The ticket is valid + Valid, + + /// The ticket has expired + Expired, + + /// The ticket has been consumed + Consumed, +} + +#[Object(use_type_description)] +impl UserRecoveryTicket { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::UserRecoveryTicket.id(self.0.id) + } + + /// When the object was created. 
+ pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// The status of the ticket + pub async fn status( + &self, + context: &Context<'_>, + ) -> Result { + let state = context.state(); + let clock = state.clock(); + let mut repo = state.repository().await?; + + // Lookup the session associated with the ticket + let session = repo + .user_recovery() + .lookup_session(self.0.user_recovery_session_id) + .await? + .context("Failed to lookup session")?; + repo.cancel().await?; + + if session.consumed_at.is_some() { + return Ok(UserRecoveryTicketStatus::Consumed); + } + + if self.0.expires_at < clock.now() { + return Ok(UserRecoveryTicketStatus::Expired); + } + + Ok(UserRecoveryTicketStatus::Valid) + } + + /// The username associated with this ticket + pub async fn username(&self, ctx: &Context<'_>) -> Result { + // We could expose the UserEmail, then the User, but this is unauthenticated, so + // we don't want to risk leaking too many objects. Instead, we just give the + // username as a property of the UserRecoveryTicket + let state = ctx.state(); + let mut repo = state.repository().await?; + let user_email = repo + .user_email() + .lookup(self.0.user_email_id) + .await? + .context("Failed to lookup user email")?; + + let user = repo + .user() + .lookup(user_email.user_id) + .await? + .context("Failed to lookup user")?; + repo.cancel().await?; + + Ok(user.username) + } + + /// The email address associated with this ticket + pub async fn email(&self, ctx: &Context<'_>) -> Result { + // We could expose the UserEmail directly, but this is unauthenticated, so we + // don't want to risk leaking too many objects. Instead, we just give + // the email as a property of the UserRecoveryTicket + let state = ctx.state(); + let mut repo = state.repository().await?; + let user_email = repo + .user_email() + .lookup(self.0.user_email_id) + .await? 
+ .context("Failed to lookup user email")?; + repo.cancel().await?; + + Ok(user_email.email) + } +} + +/// A email authentication session +#[derive(Description)] +pub struct UserEmailAuthentication(pub mas_data_model::UserEmailAuthentication); + +#[Object(use_type_description)] +impl UserEmailAuthentication { + /// ID of the object. + pub async fn id(&self) -> ID { + NodeType::UserEmailAuthentication.id(self.0.id) + } + + /// When the object was created. + pub async fn created_at(&self) -> DateTime { + self.0.created_at + } + + /// When the object was last updated. + pub async fn completed_at(&self) -> Option> { + self.0.completed_at + } + + /// The email address associated with this session + pub async fn email(&self) -> &str { + &self.0.email + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/anonymous.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/anonymous.rs new file mode 100644 index 00000000..56506d95 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/anonymous.rs @@ -0,0 +1,18 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::{ID, Object}; + +/// An anonymous viewer +#[derive(Default, Clone, Copy)] +pub struct Anonymous; + +#[Object] +impl Anonymous { + pub async fn id(&self) -> ID { + "anonymous".into() + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/mod.rs b/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/mod.rs new file mode 100644 index 00000000..4eec8c77 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/model/viewer/mod.rs @@ -0,0 +1,51 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::Union; + +use crate::graphql::model::{BrowserSession, OAuth2Session, User}; + +mod anonymous; +pub use self::anonymous::Anonymous; + +/// Represents the current viewer +#[derive(Union)] +pub enum Viewer { + User(User), + Anonymous(Anonymous), +} + +impl Viewer { + pub fn user(user: mas_data_model::User) -> Self { + Self::User(User(user)) + } + + pub fn anonymous() -> Self { + Self::Anonymous(Anonymous) + } +} + +/// Represents the current viewer's session +#[derive(Union)] +pub enum ViewerSession { + BrowserSession(Box), + OAuth2Session(Box), + Anonymous(Anonymous), +} + +impl ViewerSession { + pub fn browser_session(session: mas_data_model::BrowserSession) -> Self { + Self::BrowserSession(Box::new(BrowserSession(session))) + } + + pub fn oauth2_session(session: mas_data_model::Session) -> Self { + Self::OAuth2Session(Box::new(OAuth2Session(session))) + } + + pub fn anonymous() -> Self { + Self::Anonymous(Anonymous) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/browser_session.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/browser_session.rs new file mode 100644 index 00000000..551775db --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/browser_session.rs @@ -0,0 +1,102 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::{Context, Enum, ID, InputObject, Object}; +use mas_storage::RepositoryAccess; + +use crate::graphql::{ + model::{BrowserSession, NodeType}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct BrowserSessionMutations { + _private: (), +} + +/// The input of the `endBrowserSession` mutation. +#[derive(InputObject)] +pub struct EndBrowserSessionInput { + /// The ID of the session to end. + browser_session_id: ID, +} + +/// The payload of the `endBrowserSession` mutation. +pub enum EndBrowserSessionPayload { + NotFound, + Ended(Box), +} + +/// The status of the `endBrowserSession` mutation. +#[derive(Enum, Copy, Clone, PartialEq, Eq, Debug)] +enum EndBrowserSessionStatus { + /// The session was ended. + Ended, + + /// The session was not found. + NotFound, +} + +#[Object] +impl EndBrowserSessionPayload { + /// The status of the mutation. + async fn status(&self) -> EndBrowserSessionStatus { + match self { + Self::Ended(_) => EndBrowserSessionStatus::Ended, + Self::NotFound => EndBrowserSessionStatus::NotFound, + } + } + + /// Returns the ended session. 
+ async fn browser_session(&self) -> Option { + match self { + Self::Ended(session) => Some(BrowserSession(*session.clone())), + Self::NotFound => None, + } + } +} + +#[Object] +impl BrowserSessionMutations { + async fn end_browser_session( + &self, + ctx: &Context<'_>, + input: EndBrowserSessionInput, + ) -> Result { + let state = ctx.state(); + let browser_session_id = + NodeType::BrowserSession.extract_ulid(&input.browser_session_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let clock = state.clock(); + + let session = repo.browser_session().lookup(browser_session_id).await?; + + let Some(session) = session else { + return Ok(EndBrowserSessionPayload::NotFound); + }; + + if !requester.is_owner_or_admin(&session) { + return Ok(EndBrowserSessionPayload::NotFound); + } + + let session = repo.browser_session().finish(&clock, session).await?; + + repo.save().await?; + + // If we are ending the *current* session, we need to clear the session cookie + // as well + if requester + .browser_session() + .is_some_and(|s| s.id == session.id) + { + ctx.mark_session_ended(); + } + + Ok(EndBrowserSessionPayload::Ended(Box::new(session))) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/compat_session.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/compat_session.rs new file mode 100644 index 00000000..973d46f0 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/compat_session.rs @@ -0,0 +1,201 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use anyhow::Context as _; +use async_graphql::{Context, Enum, ID, InputObject, Object}; +use mas_storage::{ + RepositoryAccess, + compat::CompatSessionRepository, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, +}; + +use crate::graphql::{ + model::{CompatSession, NodeType}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct CompatSessionMutations { + _private: (), +} + +/// The input of the `endCompatSession` mutation. +#[derive(InputObject)] +pub struct EndCompatSessionInput { + /// The ID of the session to end. + compat_session_id: ID, +} + +/// The payload of the `endCompatSession` mutation. +pub enum EndCompatSessionPayload { + NotFound, + Ended(Box), +} + +/// The status of the `endCompatSession` mutation. +#[derive(Enum, Copy, Clone, PartialEq, Eq, Debug)] +enum EndCompatSessionStatus { + /// The session was ended. + Ended, + + /// The session was not found. + NotFound, +} + +#[Object] +impl EndCompatSessionPayload { + /// The status of the mutation. + async fn status(&self) -> EndCompatSessionStatus { + match self { + Self::Ended(_) => EndCompatSessionStatus::Ended, + Self::NotFound => EndCompatSessionStatus::NotFound, + } + } + + /// Returns the ended session. + async fn compat_session(&self) -> Option { + match self { + Self::Ended(session) => Some(CompatSession::new(*session.clone())), + Self::NotFound => None, + } + } +} + +/// The input of the `setCompatSessionName` mutation. +#[derive(InputObject)] +pub struct SetCompatSessionNameInput { + /// The ID of the session to set the name of. + compat_session_id: ID, + + /// The new name of the session. + human_name: String, +} + +/// The payload of the `setCompatSessionName` mutation. +pub enum SetCompatSessionNamePayload { + /// The session was not found. + NotFound, + + /// The session was updated. + Updated(mas_data_model::CompatSession), +} + +/// The status of the `setCompatSessionName` mutation. 
+#[derive(Enum, Copy, Clone, PartialEq, Eq, Debug)] +enum SetCompatSessionNameStatus { + /// The session was updated. + Updated, + + /// The session was not found. + NotFound, +} + +#[Object] +impl SetCompatSessionNamePayload { + /// The status of the mutation. + async fn status(&self) -> SetCompatSessionNameStatus { + match self { + Self::Updated(_) => SetCompatSessionNameStatus::Updated, + Self::NotFound => SetCompatSessionNameStatus::NotFound, + } + } + + /// The session that was updated. + async fn oauth2_session(&self) -> Option { + match self { + Self::Updated(session) => Some(CompatSession::new(session.clone())), + Self::NotFound => None, + } + } +} + +#[Object] +impl CompatSessionMutations { + async fn end_compat_session( + &self, + ctx: &Context<'_>, + input: EndCompatSessionInput, + ) -> Result { + let state = ctx.state(); + let mut rng = state.rng(); + let compat_session_id = NodeType::CompatSession.extract_ulid(&input.compat_session_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let clock = state.clock(); + + let session = repo.compat_session().lookup(compat_session_id).await?; + let Some(session) = session else { + return Ok(EndCompatSessionPayload::NotFound); + }; + + if !requester.is_owner_or_admin(&session) { + return Ok(EndCompatSessionPayload::NotFound); + } + + let user = repo + .user() + .lookup(session.user_id) + .await? 
+ .context("Could not load user")?; + + // Schedule a job to sync the devices of the user with the homeserver + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + + let session = repo.compat_session().finish(&clock, session).await?; + + repo.save().await?; + + Ok(EndCompatSessionPayload::Ended(Box::new(session))) + } + + async fn set_compat_session_name( + &self, + ctx: &Context<'_>, + input: SetCompatSessionNameInput, + ) -> Result { + let state = ctx.state(); + let compat_session_id = NodeType::CompatSession.extract_ulid(&input.compat_session_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let homeserver = state.homeserver_connection(); + + let session = repo.compat_session().lookup(compat_session_id).await?; + let Some(session) = session else { + return Ok(SetCompatSessionNamePayload::NotFound); + }; + + if !requester.is_owner_or_admin(&session) { + return Ok(SetCompatSessionNamePayload::NotFound); + } + + let user = repo + .user() + .lookup(session.user_id) + .await? + .context("User not found")?; + + let session = repo + .compat_session() + .set_human_name(session, Some(input.human_name.clone())) + .await?; + + // Update the device on the homeserver side + if let Some(device) = session.device.as_ref() { + homeserver + .update_device_display_name(&user.username, device.as_str(), &input.human_name) + .await + .context("Failed to provision device")?; + } + + repo.save().await?; + + Ok(SetCompatSessionNamePayload::Updated(session)) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/matrix.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/matrix.rs new file mode 100644 index 00000000..f88668e2 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/matrix.rs @@ -0,0 +1,118 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, InputObject, Object}; + +use crate::graphql::{ + UserId, + model::{NodeType, User}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct MatrixMutations { + _private: (), +} + +/// The input for the `addEmail` mutation +#[derive(InputObject)] +struct SetDisplayNameInput { + /// The ID of the user to add the email address to + user_id: ID, + + /// The display name to set. If `None`, the display name will be removed. + display_name: Option, +} + +/// The status of the `setDisplayName` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum SetDisplayNameStatus { + /// The display name was set + Set, + /// The display name is invalid + Invalid, +} + +/// The payload of the `setDisplayName` mutation +#[derive(Description)] +enum SetDisplayNamePayload { + Set(User), + Invalid, +} + +#[Object(use_type_description)] +impl SetDisplayNamePayload { + /// Status of the operation + async fn status(&self) -> SetDisplayNameStatus { + match self { + SetDisplayNamePayload::Set(_) => SetDisplayNameStatus::Set, + SetDisplayNamePayload::Invalid => SetDisplayNameStatus::Invalid, + } + } + + /// The user that was updated + async fn user(&self) -> Option<&User> { + match self { + SetDisplayNamePayload::Set(user) => Some(user), + SetDisplayNamePayload::Invalid => None, + } + } +} + +#[Object] +impl MatrixMutations { + /// Set the display name of a user + async fn set_display_name( + &self, + ctx: &Context<'_>, + input: SetDisplayNameInput, + ) -> Result { + let state = ctx.state(); + let id = NodeType::User.extract_ulid(&input.user_id)?; + let requester = ctx.requester(); + + if !requester.is_owner_or_admin(&UserId(id)) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + // Allow non-admins to change their display name if the site 
config allows it + if !requester.is_admin() && !state.site_config().displayname_change_allowed { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + let user = repo + .user() + .lookup(id) + .await? + .context("Failed to lookup user")?; + repo.cancel().await?; + + let conn = state.homeserver_connection(); + + if let Some(display_name) = &input.display_name { + // Let's do some basic validation on the display name + if display_name.len() > 256 { + return Ok(SetDisplayNamePayload::Invalid); + } + + if display_name.is_empty() { + return Ok(SetDisplayNamePayload::Invalid); + } + + conn.set_displayname(&user.username, display_name) + .await + .context("Failed to set display name")?; + } else { + conn.unset_displayname(&user.username) + .await + .context("Failed to unset display name")?; + } + + Ok(SetDisplayNamePayload::Set(User(user.clone()))) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/mod.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/mod.rs new file mode 100644 index 00000000..a84bf921 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/mod.rs @@ -0,0 +1,90 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod browser_session; +mod compat_session; +mod matrix; +mod oauth2_session; +mod user; +mod user_email; + +use anyhow::Context as _; +use async_graphql::MergedObject; +use mas_data_model::SiteConfig; +use mas_storage::BoxRepository; +use zeroize::Zeroizing; + +use super::Requester; +use crate::passwords::PasswordManager; + +/// The mutations root of the GraphQL interface. 
+#[derive(Default, MergedObject)] +pub struct Mutation( + user_email::UserEmailMutations, + user::UserMutations, + oauth2_session::OAuth2SessionMutations, + compat_session::CompatSessionMutations, + browser_session::BrowserSessionMutations, + matrix::MatrixMutations, +); + +impl Mutation { + #[must_use] + pub fn new() -> Self { + Self::default() + } +} + +/// Check the password if neeed +/// +/// Returns true if password verification is not needed, or if the password is +/// correct. Returns false if the password is incorrect or missing. +async fn verify_password_if_needed( + requester: &Requester, + config: &SiteConfig, + password_manager: &PasswordManager, + password: Option, + user: &mas_data_model::User, + repo: &mut BoxRepository, +) -> Result { + // If the requester is admin, they don't need to provide a password + if requester.is_admin() { + return Ok(true); + } + + // If password login is disabled, assume we don't want the user to reauth + if !config.password_login_enabled { + return Ok(true); + } + + // Else we need to check if the user has a password + let Some(user_password) = repo + .user_password() + .active(user) + .await + .context("Failed to load user password")? 
+ else { + // User has no password, so we don't need to verify the password + return Ok(true); + }; + + let Some(password) = password else { + // There is a password on the user, but not provided in the input + return Ok(false); + }; + + let password = Zeroizing::new(password); + + let res = password_manager + .verify( + user_password.version, + password, + user_password.hashed_password, + ) + .await?; + + Ok(res.is_success()) +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/oauth2_session.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/oauth2_session.rs new file mode 100644 index 00000000..55723efc --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/oauth2_session.rs @@ -0,0 +1,346 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, InputObject, Object}; +use chrono::Duration; +use mas_data_model::{Device, TokenType}; +use mas_storage::{ + RepositoryAccess, + oauth2::{ + OAuth2AccessTokenRepository, OAuth2ClientRepository, OAuth2RefreshTokenRepository, + OAuth2SessionRepository, + }, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, + user::UserRepository, +}; +use oauth2_types::scope::Scope; + +use crate::graphql::{ + model::{NodeType, OAuth2Session}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct OAuth2SessionMutations { + _private: (), +} + +/// The input of the `createOauth2Session` mutation. 
+#[derive(InputObject)] +pub struct CreateOAuth2SessionInput { + /// The scope of the session + scope: String, + + /// The ID of the user for which to create the session + user_id: ID, + + /// Whether the session should issue a never-expiring access token + permanent: Option, +} + +/// The payload of the `createOauth2Session` mutation. +#[derive(Description)] +pub struct CreateOAuth2SessionPayload { + access_token: String, + refresh_token: Option, + session: mas_data_model::Session, +} + +#[Object(use_type_description)] +impl CreateOAuth2SessionPayload { + /// Access token for this session + pub async fn access_token(&self) -> &str { + &self.access_token + } + + /// Refresh token for this session, if it is not a permanent session + pub async fn refresh_token(&self) -> Option<&str> { + self.refresh_token.as_deref() + } + + /// The OAuth 2.0 session which was just created + pub async fn oauth2_session(&self) -> OAuth2Session { + OAuth2Session(self.session.clone()) + } +} + +/// The input of the `endOauth2Session` mutation. +#[derive(InputObject)] +pub struct EndOAuth2SessionInput { + /// The ID of the session to end. + oauth2_session_id: ID, +} + +/// The payload of the `endOauth2Session` mutation. +pub enum EndOAuth2SessionPayload { + NotFound, + Ended(Box), +} + +/// The status of the `endOauth2Session` mutation. +#[derive(Enum, Copy, Clone, PartialEq, Eq, Debug)] +enum EndOAuth2SessionStatus { + /// The session was ended. + Ended, + + /// The session was not found. + NotFound, +} + +#[Object] +impl EndOAuth2SessionPayload { + /// The status of the mutation. + async fn status(&self) -> EndOAuth2SessionStatus { + match self { + Self::Ended(_) => EndOAuth2SessionStatus::Ended, + Self::NotFound => EndOAuth2SessionStatus::NotFound, + } + } + + /// Returns the ended session. 
+ async fn oauth2_session(&self) -> Option { + match self { + Self::Ended(session) => Some(OAuth2Session(*session.clone())), + Self::NotFound => None, + } + } +} + +/// The input of the `setOauth2SessionName` mutation. +#[derive(InputObject)] +pub struct SetOAuth2SessionNameInput { + /// The ID of the session to set the name of. + oauth2_session_id: ID, + + /// The new name of the session. + human_name: String, +} + +/// The payload of the `setOauth2SessionName` mutation. +pub enum SetOAuth2SessionNamePayload { + /// The session was not found. + NotFound, + + /// The session was updated. + Updated(Box), +} + +/// The status of the `setOauth2SessionName` mutation. +#[derive(Enum, Copy, Clone, PartialEq, Eq, Debug)] +enum SetOAuth2SessionNameStatus { + /// The session was updated. + Updated, + + /// The session was not found. + NotFound, +} + +#[Object] +impl SetOAuth2SessionNamePayload { + /// The status of the mutation. + async fn status(&self) -> SetOAuth2SessionNameStatus { + match self { + Self::Updated(_) => SetOAuth2SessionNameStatus::Updated, + Self::NotFound => SetOAuth2SessionNameStatus::NotFound, + } + } + + /// The session that was updated. + async fn oauth2_session(&self) -> Option { + match self { + Self::Updated(session) => Some(OAuth2Session(*session.clone())), + Self::NotFound => None, + } + } +} + +#[Object] +impl OAuth2SessionMutations { + /// Create a new arbitrary OAuth 2.0 Session. + /// + /// Only available for administrators. 
+ async fn create_oauth2_session( + &self, + ctx: &Context<'_>, + input: CreateOAuth2SessionInput, + ) -> Result { + let state = ctx.state(); + let homeserver = state.homeserver_connection(); + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let scope: Scope = input.scope.parse().context("Invalid scope")?; + let permanent = input.permanent.unwrap_or(false); + let requester = ctx.requester(); + + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let session = requester + .oauth2_session() + .context("Requester should be a OAuth 2.0 client")?; + + let mut repo = state.repository().await?; + let clock = state.clock(); + let mut rng = state.rng(); + + let client = repo + .oauth2_client() + .lookup(session.client_id) + .await? + .context("Client not found")?; + + let user = repo + .user() + .lookup(user_id) + .await? + .context("User not found")?; + + // Generate a new access token + let access_token = TokenType::AccessToken.generate(&mut rng); + + // Create the OAuth 2.0 Session + let session = repo + .oauth2_session() + .add(&mut rng, &clock, &client, Some(&user), None, scope) + .await?; + + // Lock the user sync to make sure we don't get into a race condition + repo.user().acquire_lock_for_sync(&user).await?; + + // Look for devices to provision + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + homeserver + .upsert_device(&user.username, device.as_str(), None) + .await + .context("Failed to provision device")?; + } + } + + let ttl = if permanent { + None + } else { + Some(Duration::microseconds(5 * 60 * 1000 * 1000)) + }; + let access_token = repo + .oauth2_access_token() + .add(&mut rng, &clock, &session, access_token, ttl) + .await?; + + let refresh_token = if permanent { + None + } else { + let refresh_token = TokenType::RefreshToken.generate(&mut rng); + + let refresh_token = repo + .oauth2_refresh_token() + .add(&mut rng, &clock, &session, &access_token, 
refresh_token) + .await?; + + Some(refresh_token) + }; + + repo.save().await?; + + Ok(CreateOAuth2SessionPayload { + session, + access_token: access_token.access_token, + refresh_token: refresh_token.map(|t| t.refresh_token), + }) + } + + async fn end_oauth2_session( + &self, + ctx: &Context<'_>, + input: EndOAuth2SessionInput, + ) -> Result { + let state = ctx.state(); + let oauth2_session_id = NodeType::OAuth2Session.extract_ulid(&input.oauth2_session_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let clock = state.clock(); + let mut rng = state.rng(); + + let session = repo.oauth2_session().lookup(oauth2_session_id).await?; + let Some(session) = session else { + return Ok(EndOAuth2SessionPayload::NotFound); + }; + + if !requester.is_owner_or_admin(&session) { + return Ok(EndOAuth2SessionPayload::NotFound); + } + + if let Some(user_id) = session.user_id { + let user = repo + .user() + .lookup(user_id) + .await? + .context("Could not load user")?; + + // Schedule a job to sync the devices of the user with the homeserver + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + } + + let session = repo.oauth2_session().finish(&clock, session).await?; + + repo.save().await?; + + Ok(EndOAuth2SessionPayload::Ended(Box::new(session))) + } + + async fn set_oauth2_session_name( + &self, + ctx: &Context<'_>, + input: SetOAuth2SessionNameInput, + ) -> Result { + let state = ctx.state(); + let oauth2_session_id = NodeType::OAuth2Session.extract_ulid(&input.oauth2_session_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let homeserver = state.homeserver_connection(); + + let session = repo.oauth2_session().lookup(oauth2_session_id).await?; + let Some(session) = session else { + return Ok(SetOAuth2SessionNamePayload::NotFound); + }; + + if !requester.is_owner_or_admin(&session) { + return Ok(SetOAuth2SessionNamePayload::NotFound); + } + + let user_id = 
session.user_id.context("Session has no user")?; + + let user = repo + .user() + .lookup(user_id) + .await? + .context("User not found")?; + + let session = repo + .oauth2_session() + .set_human_name(session, Some(input.human_name.clone())) + .await?; + + // Update the device on the homeserver side + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + homeserver + .update_device_display_name(&user.username, device.as_str(), &input.human_name) + .await + .context("Failed to provision device")?; + } + } + + repo.save().await?; + + Ok(SetOAuth2SessionNamePayload::Updated(Box::new(session))) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/user.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/user.rs new file mode 100644 index 00000000..355c7d0a --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/user.rs @@ -0,0 +1,986 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, InputObject, Object}; +use mas_storage::{ + queue::{ + DeactivateUserJob, ProvisionUserJob, QueueJobRepositoryExt as _, + SendAccountRecoveryEmailsJob, + }, + user::UserRepository, +}; +use tracing::{info, warn}; +use ulid::Ulid; +use url::Url; +use zeroize::Zeroizing; + +use super::verify_password_if_needed; +use crate::graphql::{ + UserId, + model::{NodeType, User}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct UserMutations { + _private: (), +} + +/// The input for the `addUser` mutation. +#[derive(InputObject)] +struct AddUserInput { + /// The username of the user to add. + username: String, + + /// Skip checking with the homeserver whether the username is valid. 
+ /// + /// Use this with caution! The main reason to use this, is when a user used + /// by an application service needs to exist in MAS to craft special + /// tokens (like with admin access) for them + skip_homeserver_check: Option, +} + +/// The status of the `addUser` mutation. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum AddUserStatus { + /// The user was added. + Added, + + /// The user already exists. + Exists, + + /// The username is reserved. + Reserved, + + /// The username is invalid. + Invalid, +} + +/// The payload for the `addUser` mutation. +#[derive(Description)] +enum AddUserPayload { + Added(mas_data_model::User), + Exists(mas_data_model::User), + Reserved, + Invalid, +} + +#[Object(use_type_description)] +impl AddUserPayload { + /// Status of the operation + async fn status(&self) -> AddUserStatus { + match self { + Self::Added(_) => AddUserStatus::Added, + Self::Exists(_) => AddUserStatus::Exists, + Self::Reserved => AddUserStatus::Reserved, + Self::Invalid => AddUserStatus::Invalid, + } + } + + /// The user that was added. + async fn user(&self) -> Option { + match self { + Self::Added(user) | Self::Exists(user) => Some(User(user.clone())), + Self::Invalid | Self::Reserved => None, + } + } +} + +/// The input for the `lockUser` mutation. +#[derive(InputObject)] +struct LockUserInput { + /// The ID of the user to lock. + user_id: ID, + + /// Permanently lock the user. + deactivate: Option, +} + +/// The status of the `lockUser` mutation. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum LockUserStatus { + /// The user was locked. + Locked, + + /// The user was not found. + NotFound, +} + +/// The payload for the `lockUser` mutation. +#[derive(Description)] +enum LockUserPayload { + /// The user was locked. + Locked(mas_data_model::User), + + /// The user was not found. 
+ NotFound, +} + +#[Object(use_type_description)] +impl LockUserPayload { + /// Status of the operation + async fn status(&self) -> LockUserStatus { + match self { + Self::Locked(_) => LockUserStatus::Locked, + Self::NotFound => LockUserStatus::NotFound, + } + } + + /// The user that was locked. + async fn user(&self) -> Option { + match self { + Self::Locked(user) => Some(User(user.clone())), + Self::NotFound => None, + } + } +} + +/// The input for the `unlockUser` mutation. +#[derive(InputObject)] +struct UnlockUserInput { + /// The ID of the user to unlock + user_id: ID, +} + +/// The status of the `unlockUser` mutation. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum UnlockUserStatus { + /// The user was unlocked. + Unlocked, + + /// The user was not found. + NotFound, +} + +/// The payload for the `unlockUser` mutation. +#[derive(Description)] +enum UnlockUserPayload { + /// The user was unlocked. + Unlocked(mas_data_model::User), + + /// The user was not found. + NotFound, +} + +#[Object(use_type_description)] +impl UnlockUserPayload { + /// Status of the operation + async fn status(&self) -> UnlockUserStatus { + match self { + Self::Unlocked(_) => UnlockUserStatus::Unlocked, + Self::NotFound => UnlockUserStatus::NotFound, + } + } + + /// The user that was unlocked. + async fn user(&self) -> Option { + match self { + Self::Unlocked(user) => Some(User(user.clone())), + Self::NotFound => None, + } + } +} + +/// The input for the `setCanRequestAdmin` mutation. +#[derive(InputObject)] +struct SetCanRequestAdminInput { + /// The ID of the user to update. + user_id: ID, + + /// Whether the user can request admin. + can_request_admin: bool, +} + +/// The payload for the `setCanRequestAdmin` mutation. +#[derive(Description)] +enum SetCanRequestAdminPayload { + /// The user was updated. + Updated(mas_data_model::User), + + /// The user was not found. 
+ NotFound, +} + +#[Object(use_type_description)] +impl SetCanRequestAdminPayload { + /// The user that was updated. + async fn user(&self) -> Option { + match self { + Self::Updated(user) => Some(User(user.clone())), + Self::NotFound => None, + } + } +} + +/// The input for the `allowUserCrossSigningReset` mutation. +#[derive(InputObject)] +struct AllowUserCrossSigningResetInput { + /// The ID of the user to update. + user_id: ID, +} + +/// The payload for the `allowUserCrossSigningReset` mutation. +#[derive(Description)] +enum AllowUserCrossSigningResetPayload { + /// The user was updated. + Allowed(mas_data_model::User), + + /// The user was not found. + NotFound, +} + +#[Object(use_type_description)] +impl AllowUserCrossSigningResetPayload { + /// The user that was updated. + async fn user(&self) -> Option { + match self { + Self::Allowed(user) => Some(User(user.clone())), + Self::NotFound => None, + } + } +} + +/// The input for the `setPassword` mutation. +#[derive(InputObject)] +struct SetPasswordInput { + /// The ID of the user to set the password for. + /// If you are not a server administrator then this must be your own user + /// ID. + user_id: ID, + + /// The current password of the user. + /// Required if you are not a server administrator. + current_password: Option, + + /// The new password for the user. + new_password: String, +} + +/// The input for the `setPasswordByRecovery` mutation. +#[derive(InputObject)] +struct SetPasswordByRecoveryInput { + /// The recovery ticket to use. + /// This identifies the user as well as proving authorisation to perform the + /// recovery operation. + ticket: String, + + /// The new password for the user. + new_password: String, +} + +/// The return type for the `setPassword` mutation. +#[derive(Description)] +struct SetPasswordPayload { + status: SetPasswordStatus, +} + +/// The status of the `setPassword` mutation. 
+#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum SetPasswordStatus { + /// The password was updated. + Allowed, + + /// The user was not found. + NotFound, + + /// The user doesn't have a current password to attempt to match against. + NoCurrentPassword, + + /// The supplied current password was wrong. + WrongPassword, + + /// The new password is invalid. For example, it may not meet configured + /// security requirements. + InvalidNewPassword, + + /// You aren't allowed to set the password for that user. + /// This happens if you aren't setting your own password and you aren't a + /// server administrator. + NotAllowed, + + /// Password support has been disabled. + /// This usually means that login is handled by an upstream identity + /// provider. + PasswordChangesDisabled, + + /// The specified recovery ticket does not exist. + NoSuchRecoveryTicket, + + /// The specified recovery ticket has already been used and cannot be used + /// again. + RecoveryTicketAlreadyUsed, + + /// The specified recovery ticket has expired. + ExpiredRecoveryTicket, + + /// Your account is locked and you can't change its password. + AccountLocked, +} + +#[Object(use_type_description)] +impl SetPasswordPayload { + /// Status of the operation + async fn status(&self) -> SetPasswordStatus { + self.status + } +} + +/// The input for the `resendRecoveryEmail` mutation. +#[derive(InputObject)] +pub struct ResendRecoveryEmailInput { + /// The recovery ticket to use. + ticket: String, +} + +/// The return type for the `resendRecoveryEmail` mutation. +#[derive(Description)] +pub enum ResendRecoveryEmailPayload { + NoSuchRecoveryTicket, + RateLimited, + Sent { recovery_session_id: Ulid }, +} + +/// The status of the `resendRecoveryEmail` mutation. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum ResendRecoveryEmailStatus { + /// The recovery ticket was not found. + NoSuchRecoveryTicket, + + /// The rate limit was exceeded. + RateLimited, + + /// The recovery email was sent. 
+ Sent, +} + +#[Object(use_type_description)] +impl ResendRecoveryEmailPayload { + /// Status of the operation + async fn status(&self) -> ResendRecoveryEmailStatus { + match self { + Self::NoSuchRecoveryTicket => ResendRecoveryEmailStatus::NoSuchRecoveryTicket, + Self::RateLimited => ResendRecoveryEmailStatus::RateLimited, + Self::Sent { .. } => ResendRecoveryEmailStatus::Sent, + } + } + + /// URL to continue the recovery process + async fn progress_url(&self, context: &Context<'_>) -> Option { + let state = context.state(); + let url_builder = state.url_builder(); + match self { + Self::NoSuchRecoveryTicket | Self::RateLimited => None, + Self::Sent { + recovery_session_id, + } => { + let route = mas_router::AccountRecoveryProgress::new(*recovery_session_id); + Some(url_builder.absolute_url_for(&route)) + } + } + } +} + +/// The input for the `deactivateUser` mutation. +#[derive(InputObject)] +pub struct DeactivateUserInput { + /// Whether to ask the homeserver to GDPR-erase the user + /// + /// This is equivalent to the `erase` parameter on the + /// `/_matrix/client/v3/account/deactivate` C-S API, which is + /// implementation-specific. + /// + /// What Synapse does is documented here: + /// + hs_erase: bool, + + /// The password of the user to deactivate. + password: Option, +} + +/// The payload for the `deactivateUser` mutation. +#[derive(Description)] +pub enum DeactivateUserPayload { + /// The user was deactivated. + Deactivated(mas_data_model::User), + + /// The password was wrong or missing. + IncorrectPassword, +} + +/// The status of the `deactivateUser` mutation. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum DeactivateUserStatus { + /// The user was deactivated. + Deactivated, + + /// The password was wrong. 
+ IncorrectPassword, +} + +#[Object(use_type_description)] +impl DeactivateUserPayload { + /// Status of the operation + async fn status(&self) -> DeactivateUserStatus { + match self { + Self::Deactivated(_) => DeactivateUserStatus::Deactivated, + Self::IncorrectPassword => DeactivateUserStatus::IncorrectPassword, + } + } + + async fn user(&self) -> Option { + match self { + Self::Deactivated(user) => Some(User(user.clone())), + Self::IncorrectPassword => None, + } + } +} + +fn valid_username_character(c: char) -> bool { + c.is_ascii_lowercase() + || c.is_ascii_digit() + || c == '=' + || c == '_' + || c == '-' + || c == '.' + || c == '/' + || c == '+' +} + +// XXX: this should probably be moved somewhere else +fn username_valid(username: &str) -> bool { + if username.is_empty() || username.len() > 255 { + return false; + } + + // Should not start with an underscore + if username.starts_with('_') { + return false; + } + + // Should only contain valid characters + if !username.chars().all(valid_username_character) { + return false; + } + + true +} + +#[Object] +impl UserMutations { + /// Add a user. This is only available to administrators. + async fn add_user( + &self, + ctx: &Context<'_>, + input: AddUserInput, + ) -> Result { + let state = ctx.state(); + let requester = ctx.requester(); + let clock = state.clock(); + let mut rng = state.rng(); + + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + + if let Some(user) = repo.user().find_by_username(&input.username).await? 
{ + return Ok(AddUserPayload::Exists(user)); + } + + // Do some basic check on the username + if !username_valid(&input.username) { + return Ok(AddUserPayload::Invalid); + } + + // Ask the homeserver if the username is available + let homeserver_available = state + .homeserver_connection() + .is_localpart_available(&input.username) + .await?; + + if !homeserver_available { + if !input.skip_homeserver_check.unwrap_or(false) { + return Ok(AddUserPayload::Reserved); + } + + // If we skipped the check, we still want to shout about it + warn!("Skipped homeserver check for username {}", input.username); + } + + let user = repo.user().add(&mut rng, &clock, input.username).await?; + + repo.queue_job() + .schedule_job(&mut rng, &clock, ProvisionUserJob::new(&user)) + .await?; + + repo.save().await?; + + Ok(AddUserPayload::Added(user)) + } + + /// Lock a user. This is only available to administrators. + async fn lock_user( + &self, + ctx: &Context<'_>, + input: LockUserInput, + ) -> Result { + let state = ctx.state(); + let clock = state.clock(); + let mut rng = state.rng(); + let requester = ctx.requester(); + + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let user = repo.user().lookup(user_id).await?; + + let Some(user) = user else { + return Ok(LockUserPayload::NotFound); + }; + + let deactivate = input.deactivate.unwrap_or(false); + + let user = repo.user().lock(&state.clock(), user).await?; + + if deactivate { + info!(%user.id, "Scheduling deactivation of user"); + repo.queue_job() + .schedule_job(&mut rng, &clock, DeactivateUserJob::new(&user, deactivate)) + .await?; + } + + repo.save().await?; + + Ok(LockUserPayload::Locked(user)) + } + + /// Unlock and reactivate a user. This is only available to administrators. 
+ async fn unlock_user( + &self, + ctx: &Context<'_>, + input: UnlockUserInput, + ) -> Result { + let state = ctx.state(); + let requester = ctx.requester(); + let matrix = state.homeserver_connection(); + + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let user = repo.user().lookup(user_id).await?; + + let Some(user) = user else { + return Ok(UnlockUserPayload::NotFound); + }; + + // Call the homeserver synchronously to reactivate the user + matrix.reactivate_user(&user.username).await?; + + // Now reactivate & unlock the user in our database + let user = repo.user().reactivate(user).await?; + let user = repo.user().unlock(user).await?; + + repo.save().await?; + + Ok(UnlockUserPayload::Unlocked(user)) + } + + /// Set whether a user can request admin. This is only available to + /// administrators. + async fn set_can_request_admin( + &self, + ctx: &Context<'_>, + input: SetCanRequestAdminInput, + ) -> Result { + let state = ctx.state(); + let requester = ctx.requester(); + + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let user = repo.user().lookup(user_id).await?; + + let Some(user) = user else { + return Ok(SetCanRequestAdminPayload::NotFound); + }; + + let user = repo + .user() + .set_can_request_admin(user, input.can_request_admin) + .await?; + + repo.save().await?; + + Ok(SetCanRequestAdminPayload::Updated(user)) + } + + /// Temporarily allow user to reset their cross-signing keys. 
+ async fn allow_user_cross_signing_reset( + &self, + ctx: &Context<'_>, + input: AllowUserCrossSigningResetInput, + ) -> Result { + let state = ctx.state(); + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let requester = ctx.requester(); + + if !requester.is_owner_or_admin(&UserId(user_id)) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let mut repo = state.repository().await?; + let user = repo.user().lookup(user_id).await?; + repo.cancel().await?; + + let Some(user) = user else { + return Ok(AllowUserCrossSigningResetPayload::NotFound); + }; + + let conn = state.homeserver_connection(); + conn.allow_cross_signing_reset(&user.username) + .await + .context("Failed to allow cross-signing reset")?; + + Ok(AllowUserCrossSigningResetPayload::Allowed(user)) + } + + /// Set the password for a user. + /// + /// This can be used by server administrators to set any user's password, + /// or, provided the capability hasn't been disabled on this server, + /// by a user to change their own password as long as they know their + /// current password. + async fn set_password( + &self, + ctx: &Context<'_>, + input: SetPasswordInput, + ) -> Result { + let state = ctx.state(); + let user_id = NodeType::User.extract_ulid(&input.user_id)?; + let requester = ctx.requester(); + + if !requester.is_owner_or_admin(&UserId(user_id)) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + if input.new_password.is_empty() { + // TODO Expose the reason for the policy violation + // This involves redesigning the error handling + // Idea would be to expose an errors array in the response, + // with a list of union of different error kinds. 
+ return Ok(SetPasswordPayload { + status: SetPasswordStatus::InvalidNewPassword, + }); + } + + let password_manager = state.password_manager(); + + if !password_manager.is_enabled() { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::PasswordChangesDisabled, + }); + } + + if !password_manager.is_password_complex_enough(&input.new_password)? { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::InvalidNewPassword, + }); + } + + let mut repo = state.repository().await?; + let Some(user) = repo.user().lookup(user_id).await? else { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::NotFound, + }); + }; + + if !requester.is_admin() { + // If the user isn't an admin, we: + // - check that password changes are enabled + // - check that they know their current password + + if !state.site_config().password_change_allowed { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::PasswordChangesDisabled, + }); + } + + let Some(active_password) = repo.user_password().active(&user).await? else { + // The user has no current password, so can't verify against one. + // In the future, it may be desirable to let the user set a password without any + // other verification instead. + + return Ok(SetPasswordPayload { + status: SetPasswordStatus::NoCurrentPassword, + }); + }; + + let Some(current_password_attempt) = input.current_password else { + return Err(async_graphql::Error::new( + "You must supply `currentPassword` to change your own password if you are not an administrator", + )); + }; + + if !password_manager + .verify( + active_password.version, + Zeroizing::new(current_password_attempt), + active_password.hashed_password, + ) + .await? 
+ .is_success() + { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::WrongPassword, + }); + } + } + + let (new_password_version, new_password_hash) = password_manager + .hash(state.rng(), Zeroizing::new(input.new_password)) + .await?; + + repo.user_password() + .add( + &mut state.rng(), + &state.clock(), + &user, + new_password_version, + new_password_hash, + None, + ) + .await?; + + repo.save().await?; + + Ok(SetPasswordPayload { + status: SetPasswordStatus::Allowed, + }) + } + + /// Set the password for yourself, using a recovery ticket sent by e-mail. + async fn set_password_by_recovery( + &self, + ctx: &Context<'_>, + input: SetPasswordByRecoveryInput, + ) -> Result { + let state = ctx.state(); + let requester = ctx.requester(); + let clock = state.clock(); + if !requester.is_unauthenticated() { + return Err(async_graphql::Error::new( + "Account recovery is only for anonymous users.", + )); + } + + let password_manager = state.password_manager(); + + if !password_manager.is_enabled() || !state.site_config().account_recovery_allowed { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::PasswordChangesDisabled, + }); + } + + if !password_manager.is_password_complex_enough(&input.new_password)? { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::InvalidNewPassword, + }); + } + + let mut repo = state.repository().await?; + + let Some(ticket) = repo.user_recovery().find_ticket(&input.ticket).await? else { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::NoSuchRecoveryTicket, + }); + }; + + let session = repo + .user_recovery() + .lookup_session(ticket.user_recovery_session_id) + .await? 
+ .context("Unknown session")?; + + if session.consumed_at.is_some() { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::RecoveryTicketAlreadyUsed, + }); + } + + if !ticket.active(clock.now()) { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::ExpiredRecoveryTicket, + }); + } + + let user_email = repo + .user_email() + .lookup(ticket.user_email_id) + .await? + .context("Unknown email address")?; + + let user = repo + .user() + .lookup(user_email.user_id) + .await? + .context("Invalid user")?; + + if !user.is_valid() { + return Ok(SetPasswordPayload { + status: SetPasswordStatus::AccountLocked, + }); + } + + let (new_password_version, new_password_hash) = password_manager + .hash(state.rng(), Zeroizing::new(input.new_password)) + .await?; + + repo.user_password() + .add( + &mut state.rng(), + &state.clock(), + &user, + new_password_version, + new_password_hash, + None, + ) + .await?; + + // Mark the session as consumed + repo.user_recovery() + .consume_ticket(&clock, ticket, session) + .await?; + + repo.save().await?; + + Ok(SetPasswordPayload { + status: SetPasswordStatus::Allowed, + }) + } + + /// Resend a user recovery email + /// + /// This is used when a user opens a recovery link that has expired. In this + /// case, we display a link for them to get a new recovery email, which + /// calls this mutation. + pub async fn resend_recovery_email( + &self, + ctx: &Context<'_>, + input: ResendRecoveryEmailInput, + ) -> Result { + let state = ctx.state(); + let requester = ctx.requester(); + let clock = state.clock(); + let mut rng = state.rng(); + let limiter = state.limiter(); + let mut repo = state.repository().await?; + + let Some(recovery_ticket) = repo.user_recovery().find_ticket(&input.ticket).await? else { + return Ok(ResendRecoveryEmailPayload::NoSuchRecoveryTicket); + }; + + let recovery_session = repo + .user_recovery() + .lookup_session(recovery_ticket.user_recovery_session_id) + .await? 
+ .context("Could not load recovery session")?; + + if let Err(e) = + limiter.check_account_recovery(requester.fingerprint(), &recovery_session.email) + { + tracing::warn!(error = &e as &dyn std::error::Error); + return Ok(ResendRecoveryEmailPayload::RateLimited); + } + + // Schedule a new batch of emails + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendAccountRecoveryEmailsJob::new(&recovery_session), + ) + .await?; + + repo.save().await?; + + Ok(ResendRecoveryEmailPayload::Sent { + recovery_session_id: recovery_session.id, + }) + } + + /// Deactivate the current user account + /// + /// If the user has a password, it *must* be supplied in the `password` + /// field. + async fn deactivate_user( + &self, + ctx: &Context<'_>, + input: DeactivateUserInput, + ) -> Result { + let state = ctx.state(); + let mut rng = state.rng(); + let clock = state.clock(); + let requester = ctx.requester(); + let site_config = state.site_config(); + + // Only allow calling this if the requester is a browser session + let Some(browser_session) = requester.browser_session() else { + return Err(async_graphql::Error::new("Unauthorized")); + }; + + if !site_config.account_deactivation_allowed { + return Err(async_graphql::Error::new( + "Account deactivation is not allowed on this server", + )); + } + + let mut repo = state.repository().await?; + if !verify_password_if_needed( + requester, + site_config, + &state.password_manager(), + input.password, + &browser_session.user, + &mut repo, + ) + .await? 
+ { + return Ok(DeactivateUserPayload::IncorrectPassword); + } + + // Deactivate the user right away + let user = repo + .user() + .deactivate(&state.clock(), browser_session.user.clone()) + .await?; + + // and then schedule a job to deactivate it fully + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + DeactivateUserJob::new(&user, input.hs_erase), + ) + .await?; + + repo.save().await?; + + Ok(DeactivateUserPayload::Deactivated(user)) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/mutations/user_email.rs b/matrix-authentication-service/crates/handlers/src/graphql/mutations/user_email.rs new file mode 100644 index 00000000..34fb5405 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/mutations/user_email.rs @@ -0,0 +1,849 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use async_graphql::{Context, Description, Enum, ID, InputObject, Object}; +use mas_i18n::DataLocale; +use mas_storage::{ + RepositoryAccess, + queue::{ProvisionUserJob, QueueJobRepositoryExt as _, SendEmailAuthenticationCodeJob}, + user::{UserEmailFilter, UserEmailRepository, UserRepository}, +}; + +use super::verify_password_if_needed; +use crate::graphql::{ + model::{NodeType, User, UserEmail, UserEmailAuthentication}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct UserEmailMutations { + _private: (), +} + +/// The input for the `addEmail` mutation +#[derive(InputObject)] +struct AddEmailInput { + /// The email address to add + email: String, + + /// The ID of the user to add the email address to + user_id: ID, + + /// Skip the email address verification. Only allowed for admins. + skip_verification: Option, + + /// Skip the email address policy check. Only allowed for admins. 
+ skip_policy_check: Option, +} + +/// The status of the `addEmail` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +pub enum AddEmailStatus { + /// The email address was added + Added, + /// The email address already exists + Exists, + /// The email address is invalid + Invalid, + /// The email address is not allowed by the policy + Denied, +} + +/// The payload of the `addEmail` mutation +#[derive(Description)] +enum AddEmailPayload { + Added(mas_data_model::UserEmail), + Exists(mas_data_model::UserEmail), + Invalid, + Denied { + violations: Vec, + }, +} + +#[Object(use_type_description)] +impl AddEmailPayload { + /// Status of the operation + async fn status(&self) -> AddEmailStatus { + match self { + AddEmailPayload::Added(_) => AddEmailStatus::Added, + AddEmailPayload::Exists(_) => AddEmailStatus::Exists, + AddEmailPayload::Invalid => AddEmailStatus::Invalid, + AddEmailPayload::Denied { .. } => AddEmailStatus::Denied, + } + } + + /// The email address that was added + async fn email(&self) -> Option { + match self { + AddEmailPayload::Added(email) | AddEmailPayload::Exists(email) => { + Some(UserEmail(email.clone())) + } + AddEmailPayload::Invalid | AddEmailPayload::Denied { .. } => None, + } + } + + /// The user to whom the email address was added + async fn user(&self, ctx: &Context<'_>) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + + let user_id = match self { + AddEmailPayload::Added(email) | AddEmailPayload::Exists(email) => email.user_id, + AddEmailPayload::Invalid | AddEmailPayload::Denied { .. } => return Ok(None), + }; + + let user = repo + .user() + .lookup(user_id) + .await? 
+ .context("User not found")?; + + Ok(Some(User(user))) + } + + /// The list of policy violations if the email address was denied + async fn violations(&self) -> Option> { + let AddEmailPayload::Denied { violations } = self else { + return None; + }; + + let messages = violations.iter().map(|v| v.msg.clone()).collect(); + Some(messages) + } +} + +/// The input for the `removeEmail` mutation +#[derive(InputObject)] +struct RemoveEmailInput { + /// The ID of the email address to remove + user_email_id: ID, + + /// The user's current password. This is required if the user is not an + /// admin and it has a password on its account. + password: Option, +} + +/// The status of the `removeEmail` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum RemoveEmailStatus { + /// The email address was removed + Removed, + + /// The email address was not found + NotFound, + + /// The password provided is incorrect + IncorrectPassword, +} + +/// The payload of the `removeEmail` mutation +#[derive(Description)] +enum RemoveEmailPayload { + Removed(mas_data_model::UserEmail), + NotFound, + IncorrectPassword, +} + +#[Object(use_type_description)] +impl RemoveEmailPayload { + /// Status of the operation + async fn status(&self) -> RemoveEmailStatus { + match self { + RemoveEmailPayload::Removed(_) => RemoveEmailStatus::Removed, + RemoveEmailPayload::NotFound => RemoveEmailStatus::NotFound, + RemoveEmailPayload::IncorrectPassword => RemoveEmailStatus::IncorrectPassword, + } + } + + /// The email address that was removed + async fn email(&self) -> Option { + match self { + RemoveEmailPayload::Removed(email) => Some(UserEmail(email.clone())), + RemoveEmailPayload::NotFound | RemoveEmailPayload::IncorrectPassword => None, + } + } + + /// The user to whom the email address belonged + async fn user(&self, ctx: &Context<'_>) -> Result, async_graphql::Error> { + let state = ctx.state(); + + let user_id = match self { + RemoveEmailPayload::Removed(email) => email.user_id, + 
RemoveEmailPayload::NotFound | RemoveEmailPayload::IncorrectPassword => { + return Ok(None); + } + }; + + let mut repo = state.repository().await?; + + let user = repo + .user() + .lookup(user_id) + .await? + .context("User not found")?; + + Ok(Some(User(user))) + } +} + +/// The input for the `setPrimaryEmail` mutation +#[derive(InputObject)] +struct SetPrimaryEmailInput { + /// The ID of the email address to set as primary + user_email_id: ID, +} + +/// The status of the `setPrimaryEmail` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum SetPrimaryEmailStatus { + /// The email address was set as primary + Set, + /// The email address was not found + NotFound, + /// Can't make an unverified email address primary + Unverified, +} + +/// The payload of the `setPrimaryEmail` mutation +#[derive(Description)] +enum SetPrimaryEmailPayload { + Set(mas_data_model::User), + NotFound, +} + +#[Object(use_type_description)] +impl SetPrimaryEmailPayload { + async fn status(&self) -> SetPrimaryEmailStatus { + match self { + SetPrimaryEmailPayload::Set(_) => SetPrimaryEmailStatus::Set, + SetPrimaryEmailPayload::NotFound => SetPrimaryEmailStatus::NotFound, + } + } + + /// The user to whom the email address belongs + async fn user(&self) -> Option { + match self { + SetPrimaryEmailPayload::Set(user) => Some(User(user.clone())), + SetPrimaryEmailPayload::NotFound => None, + } + } +} + +/// The input for the `startEmailAuthentication` mutation +#[derive(InputObject)] +struct StartEmailAuthenticationInput { + /// The email address to add to the account + email: String, + + /// The user's current password. This is required if the user has a password + /// on its account. 
+ password: Option, + + /// The language to use for the email + #[graphql(default = "en")] + language: String, +} + +/// The status of the `startEmailAuthentication` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum StartEmailAuthenticationStatus { + /// The email address was started + Started, + /// The email address is invalid + InvalidEmailAddress, + /// Too many attempts to start an email authentication + RateLimited, + /// The email address isn't allowed by the policy + Denied, + /// The email address is already in use on this account + InUse, + /// The password provided is incorrect + IncorrectPassword, +} + +/// The payload of the `startEmailAuthentication` mutation +#[derive(Description)] +enum StartEmailAuthenticationPayload { + Started(UserEmailAuthentication), + InvalidEmailAddress, + RateLimited, + Denied { + violations: Vec, + }, + InUse, + IncorrectPassword, +} + +#[Object(use_type_description)] +impl StartEmailAuthenticationPayload { + /// Status of the operation + async fn status(&self) -> StartEmailAuthenticationStatus { + match self { + Self::Started(_) => StartEmailAuthenticationStatus::Started, + Self::InvalidEmailAddress => StartEmailAuthenticationStatus::InvalidEmailAddress, + Self::RateLimited => StartEmailAuthenticationStatus::RateLimited, + Self::Denied { .. } => StartEmailAuthenticationStatus::Denied, + Self::InUse => StartEmailAuthenticationStatus::InUse, + Self::IncorrectPassword => StartEmailAuthenticationStatus::IncorrectPassword, + } + } + + /// The email authentication session that was started + async fn authentication(&self) -> Option<&UserEmailAuthentication> { + match self { + Self::Started(authentication) => Some(authentication), + Self::InvalidEmailAddress + | Self::RateLimited + | Self::Denied { .. 
} + | Self::InUse + | Self::IncorrectPassword => None, + } + } + + /// The list of policy violations if the email address was denied + async fn violations(&self) -> Option> { + let Self::Denied { violations } = self else { + return None; + }; + + let messages = violations.iter().map(|v| v.msg.clone()).collect(); + Some(messages) + } +} + +/// The input for the `completeEmailAuthentication` mutation +#[derive(InputObject)] +struct CompleteEmailAuthenticationInput { + /// The authentication code to use + code: String, + + /// The ID of the authentication session to complete + id: ID, +} + +/// The payload of the `completeEmailAuthentication` mutation +#[derive(Description)] +enum CompleteEmailAuthenticationPayload { + Completed, + InvalidCode, + CodeExpired, + InUse, + RateLimited, +} + +/// The status of the `completeEmailAuthentication` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum CompleteEmailAuthenticationStatus { + /// The authentication was completed + Completed, + /// The authentication code is invalid + InvalidCode, + /// The authentication code has expired + CodeExpired, + /// Too many attempts to complete an email authentication + RateLimited, + /// The email address is already in use + InUse, +} + +#[Object(use_type_description)] +impl CompleteEmailAuthenticationPayload { + /// Status of the operation + async fn status(&self) -> CompleteEmailAuthenticationStatus { + match self { + Self::Completed => CompleteEmailAuthenticationStatus::Completed, + Self::InvalidCode => CompleteEmailAuthenticationStatus::InvalidCode, + Self::CodeExpired => CompleteEmailAuthenticationStatus::CodeExpired, + Self::InUse => CompleteEmailAuthenticationStatus::InUse, + Self::RateLimited => CompleteEmailAuthenticationStatus::RateLimited, + } + } +} + +/// The input for the `resendEmailAuthenticationCode` mutation +#[derive(InputObject)] +struct ResendEmailAuthenticationCodeInput { + /// The ID of the authentication session to resend the code for + id: ID, + + /// The 
language to use for the email + #[graphql(default = "en")] + language: String, +} + +/// The payload of the `resendEmailAuthenticationCode` mutation +#[derive(Description)] +enum ResendEmailAuthenticationCodePayload { + /// The email was resent + Resent, + /// The email authentication session is already completed + Completed, + /// Too many attempts to resend an email authentication code + RateLimited, +} + +/// The status of the `resendEmailAuthenticationCode` mutation +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum ResendEmailAuthenticationCodeStatus { + /// The email was resent + Resent, + /// The email authentication session is already completed + Completed, + /// Too many attempts to resend an email authentication code + RateLimited, +} + +#[Object(use_type_description)] +impl ResendEmailAuthenticationCodePayload { + /// Status of the operation + async fn status(&self) -> ResendEmailAuthenticationCodeStatus { + match self { + Self::Resent => ResendEmailAuthenticationCodeStatus::Resent, + Self::Completed => ResendEmailAuthenticationCodeStatus::Completed, + Self::RateLimited => ResendEmailAuthenticationCodeStatus::RateLimited, + } + } +} + +#[Object] +impl UserEmailMutations { + /// Add an email address to the specified user + #[graphql(deprecation = "Use `startEmailAuthentication` instead.")] + async fn add_email( + &self, + ctx: &Context<'_>, + input: AddEmailInput, + ) -> Result { + let state = ctx.state(); + let id = NodeType::User.extract_ulid(&input.user_id)?; + let requester = ctx.requester(); + let clock = state.clock(); + let mut rng = state.rng(); + + // Only allow admin to call this mutation + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let _skip_verification = input.skip_verification.unwrap_or(false); + let skip_policy_check = input.skip_policy_check.unwrap_or(false); + + let mut repo = state.repository().await?; + + let user = repo + .user() + .lookup(id) + .await? 
+ .context("Failed to load user")?; + + // Validate the email address + if input.email.parse::().is_err() { + return Ok(AddEmailPayload::Invalid); + } + + if !skip_policy_check { + let mut policy = state.policy().await?; + let res = policy + .evaluate_email(mas_policy::EmailInput { + email: &input.email, + requester: requester.for_policy(), + }) + .await?; + if !res.valid() { + return Ok(AddEmailPayload::Denied { + violations: res.violations, + }); + } + } + + // Find an existing email address + let existing_user_email = repo.user_email().find(&user, &input.email).await?; + let (added, user_email) = if let Some(user_email) = existing_user_email { + (false, user_email) + } else { + let user_email = repo + .user_email() + .add(&mut rng, &clock, &user, input.email) + .await?; + + (true, user_email) + }; + + repo.save().await?; + + let payload = if added { + AddEmailPayload::Added(user_email) + } else { + AddEmailPayload::Exists(user_email) + }; + Ok(payload) + } + + /// Remove an email address + async fn remove_email( + &self, + ctx: &Context<'_>, + input: RemoveEmailInput, + ) -> Result { + let state = ctx.state(); + let user_email_id = NodeType::UserEmail.extract_ulid(&input.user_email_id)?; + let requester = ctx.requester(); + + let mut rng = state.rng(); + let clock = state.clock(); + let mut repo = state.repository().await?; + + let user_email = repo.user_email().lookup(user_email_id).await?; + let Some(user_email) = user_email else { + return Ok(RemoveEmailPayload::NotFound); + }; + + if !requester.is_owner_or_admin(&user_email) { + return Ok(RemoveEmailPayload::NotFound); + } + + // Allow non-admins to remove their email address if the site config allows it + if !requester.is_admin() && !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let user = repo + .user() + .lookup(user_email.user_id) + .await? 
+ .context("Failed to load user")?; + + // Validate the password input if needed + if !verify_password_if_needed( + requester, + state.site_config(), + &state.password_manager(), + input.password, + &user, + &mut repo, + ) + .await? + { + return Ok(RemoveEmailPayload::IncorrectPassword); + } + + // TODO: don't allow removing the last email address + + repo.user_email().remove(user_email.clone()).await?; + + // Schedule a job to update the user + repo.queue_job() + .schedule_job(&mut rng, &clock, ProvisionUserJob::new(&user)) + .await?; + + repo.save().await?; + + Ok(RemoveEmailPayload::Removed(user_email)) + } + + /// Set an email address as primary + #[graphql( + deprecation = "This doesn't do anything anymore, but is kept to avoid breaking existing queries" + )] + async fn set_primary_email( + &self, + ctx: &Context<'_>, + input: SetPrimaryEmailInput, + ) -> Result { + let state = ctx.state(); + let user_email_id = NodeType::UserEmail.extract_ulid(&input.user_email_id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + + let user_email = repo.user_email().lookup(user_email_id).await?; + let Some(user_email) = user_email else { + return Ok(SetPrimaryEmailPayload::NotFound); + }; + + if !requester.is_owner_or_admin(&user_email) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + // Allow non-admins to change their primary email address if the site config + // allows it + if !requester.is_admin() && !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new("Unauthorized")); + } + + // The user primary email should already be up to date + let user = repo + .user() + .lookup(user_email.user_id) + .await? 
+ .context("Failed to load user")?; + + repo.save().await?; + + Ok(SetPrimaryEmailPayload::Set(user)) + } + + /// Start a new email authentication flow + async fn start_email_authentication( + &self, + ctx: &Context<'_>, + input: StartEmailAuthenticationInput, + ) -> Result { + let state = ctx.state(); + let mut rng = state.rng(); + let clock = state.clock(); + let requester = ctx.requester(); + let limiter = state.limiter(); + + // Only allow calling this if the requester is a browser session + let Some(browser_session) = requester.browser_session() else { + return Err(async_graphql::Error::new("Unauthorized")); + }; + + // Allow to starting the email authentication flow if the site config allows it + if !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new( + "Email changes are not allowed on this server", + )); + } + + if !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new( + "Email authentication is not allowed on this server", + )); + } + + // Check if the locale is valid + let _: DataLocale = input.language.parse()?; + + // Check if the email address is valid + if input.email.parse::().is_err() { + return Ok(StartEmailAuthenticationPayload::InvalidEmailAddress); + } + + if let Err(e) = + limiter.check_email_authentication_email(requester.fingerprint(), &input.email) + { + tracing::warn!(error = &e as &dyn std::error::Error); + return Ok(StartEmailAuthenticationPayload::RateLimited); + } + + let mut repo = state.repository().await?; + + // Check if the email address is already in use by the same user + // We don't report here if the email address is already in use by another user, + // because we don't want to leak information about other users. 
We will do that + // only when the user enters the right verification code + let count = repo + .user_email() + .count( + UserEmailFilter::new() + .for_email(&input.email) + .for_user(&browser_session.user), + ) + .await?; + if count > 0 { + return Ok(StartEmailAuthenticationPayload::InUse); + } + + // Check if the email address is allowed by the policy + let mut policy = state.policy().await?; + let res = policy + .evaluate_email(mas_policy::EmailInput { + email: &input.email, + requester: requester.for_policy(), + }) + .await?; + if !res.valid() { + return Ok(StartEmailAuthenticationPayload::Denied { + violations: res.violations, + }); + } + + // Validate the password input if needed + if !verify_password_if_needed( + requester, + state.site_config(), + &state.password_manager(), + input.password, + &browser_session.user, + &mut repo, + ) + .await? + { + return Ok(StartEmailAuthenticationPayload::IncorrectPassword); + } + + // Create a new authentication session + let authentication = repo + .user_email() + .add_authentication_for_session(&mut rng, &clock, input.email, browser_session) + .await?; + + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendEmailAuthenticationCodeJob::new(&authentication, input.language), + ) + .await?; + + repo.save().await?; + + Ok(StartEmailAuthenticationPayload::Started( + UserEmailAuthentication(authentication), + )) + } + + /// Resend the email authentication code + async fn resend_email_authentication_code( + &self, + ctx: &Context<'_>, + input: ResendEmailAuthenticationCodeInput, + ) -> Result { + let state = ctx.state(); + let mut rng = state.rng(); + let clock = state.clock(); + let limiter = state.limiter(); + let requester = ctx.requester(); + + let id = NodeType::UserEmailAuthentication.extract_ulid(&input.id)?; + let Some(browser_session) = requester.browser_session() else { + return Err(async_graphql::Error::new("Unauthorized")); + }; + + // Allow to completing the email authentication flow if the site config 
allows + // it + if !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new( + "Email changes are not allowed on this server", + )); + } + + // Check if the locale is valid + let _: DataLocale = input.language.parse()?; + + let mut repo = state.repository().await?; + + let Some(authentication) = repo.user_email().lookup_authentication(id).await? else { + return Ok(ResendEmailAuthenticationCodePayload::Completed); + }; + + // Make sure this authentication belongs to the requester + if authentication.user_session_id != Some(browser_session.id) { + return Err(async_graphql::Error::new("Unauthorized")); + } + + if authentication.completed_at.is_some() { + return Ok(ResendEmailAuthenticationCodePayload::Completed); + } + + if let Err(e) = + limiter.check_email_authentication_send_code(requester.fingerprint(), &authentication) + { + tracing::warn!(error = &e as &dyn std::error::Error); + return Ok(ResendEmailAuthenticationCodePayload::RateLimited); + } + + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendEmailAuthenticationCodeJob::new(&authentication, input.language), + ) + .await?; + + repo.save().await?; + + Ok(ResendEmailAuthenticationCodePayload::Resent) + } + + /// Complete the email authentication flow + async fn complete_email_authentication( + &self, + ctx: &Context<'_>, + input: CompleteEmailAuthenticationInput, + ) -> Result { + let state = ctx.state(); + let mut rng = state.rng(); + let clock = state.clock(); + let limiter = state.limiter(); + + let id = NodeType::UserEmailAuthentication.extract_ulid(&input.id)?; + + let Some(browser_session) = ctx.requester().browser_session() else { + return Err(async_graphql::Error::new("Unauthorized")); + }; + + // Allow to completing the email authentication flow if the site config allows + // it + if !state.site_config().email_change_allowed { + return Err(async_graphql::Error::new( + "Email changes are not allowed on this server", + )); + } + + let mut repo = 
state.repository().await?; + + let Some(authentication) = repo.user_email().lookup_authentication(id).await? else { + return Ok(CompleteEmailAuthenticationPayload::InvalidCode); + }; + + // Make sure this authentication belongs to the requester + if authentication.user_session_id != Some(browser_session.id) { + return Ok(CompleteEmailAuthenticationPayload::InvalidCode); + } + + if let Err(e) = limiter.check_email_authentication_attempt(&authentication) { + tracing::warn!(error = &e as &dyn std::error::Error); + return Ok(CompleteEmailAuthenticationPayload::RateLimited); + } + + let Some(code) = repo + .user_email() + .find_authentication_code(&authentication, &input.code) + .await? + else { + return Ok(CompleteEmailAuthenticationPayload::InvalidCode); + }; + + if code.expires_at < state.clock().now() { + return Ok(CompleteEmailAuthenticationPayload::CodeExpired); + } + + let authentication = repo + .user_email() + .complete_authentication_with_code(&clock, authentication, &code) + .await?; + + // Check the email is not already in use by anyone, including the current user + let count = repo + .user_email() + .count(UserEmailFilter::new().for_email(&authentication.email)) + .await?; + + if count > 0 { + // We still want to consume the code so that it can't be reused + repo.save().await?; + + return Ok(CompleteEmailAuthenticationPayload::InUse); + } + + repo.user_email() + .add( + &mut rng, + &clock, + &browser_session.user, + authentication.email, + ) + .await?; + + repo.save().await?; + + Ok(CompleteEmailAuthenticationPayload::Completed) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/query/mod.rs b/matrix-authentication-service/crates/handlers/src/graphql/query/mod.rs new file mode 100644 index 00000000..66e6b38b --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/query/mod.rs @@ -0,0 +1,300 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::{Context, ID, MergedObject, Object}; + +use crate::graphql::{ + model::{ + Anonymous, BrowserSession, CompatSession, Node, NodeType, OAuth2Client, OAuth2Session, + SiteConfig, User, UserEmail, UserRecoveryTicket, + }, + state::ContextExt, +}; + +mod session; +mod upstream_oauth; +mod user; +mod viewer; + +use self::{ + session::SessionQuery, upstream_oauth::UpstreamOAuthQuery, user::UserQuery, viewer::ViewerQuery, +}; +use super::model::UserEmailAuthentication; + +/// The query root of the GraphQL interface. +#[derive(Default, MergedObject)] +pub struct Query( + BaseQuery, + UserQuery, + UpstreamOAuthQuery, + SessionQuery, + ViewerQuery, +); + +impl Query { + #[must_use] + pub fn new() -> Self { + Self::default() + } +} + +#[derive(Default)] +struct BaseQuery; + +// TODO: move the rest of the queries in separate modules +#[Object] +impl BaseQuery { + /// Get the current logged in browser session + #[graphql(deprecation = "Use `viewerSession` instead.")] + async fn current_browser_session( + &self, + ctx: &Context<'_>, + ) -> Result, async_graphql::Error> { + let requester = ctx.requester(); + Ok(requester + .browser_session() + .cloned() + .map(BrowserSession::from)) + } + + /// Get the current logged in user + #[graphql(deprecation = "Use `viewer` instead.")] + async fn current_user(&self, ctx: &Context<'_>) -> Result, async_graphql::Error> { + let requester = ctx.requester(); + Ok(requester.user().cloned().map(User::from)) + } + + /// Fetch an OAuth 2.0 client by its ID. 
+ async fn oauth2_client( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::OAuth2Client.extract_ulid(&id)?; + + let mut repo = state.repository().await?; + let client = repo.oauth2_client().lookup(id).await?; + repo.cancel().await?; + + Ok(client.map(OAuth2Client)) + } + + /// Fetch a browser session by its ID. + async fn browser_session( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::BrowserSession.extract_ulid(&id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let browser_session = repo.browser_session().lookup(id).await?; + repo.cancel().await?; + + let Some(browser_session) = browser_session else { + return Ok(None); + }; + + if !requester.is_owner_or_admin(&browser_session) { + return Ok(None); + } + + Ok(Some(BrowserSession(browser_session))) + } + + /// Fetch a compatible session by its ID. + async fn compat_session( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::CompatSession.extract_ulid(&id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let compat_session = repo.compat_session().lookup(id).await?; + repo.cancel().await?; + + let Some(compat_session) = compat_session else { + return Ok(None); + }; + + if !requester.is_owner_or_admin(&compat_session) { + return Ok(None); + } + + Ok(Some(CompatSession::new(compat_session))) + } + + /// Fetch an OAuth 2.0 session by its ID. 
+ async fn oauth2_session( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::OAuth2Session.extract_ulid(&id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let oauth2_session = repo.oauth2_session().lookup(id).await?; + repo.cancel().await?; + + let Some(oauth2_session) = oauth2_session else { + return Ok(None); + }; + + if !requester.is_owner_or_admin(&oauth2_session) { + return Ok(None); + } + + Ok(Some(OAuth2Session(oauth2_session))) + } + + /// Fetch a user email by its ID. + async fn user_email( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::UserEmail.extract_ulid(&id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let user_email = repo.user_email().lookup(id).await?; + repo.cancel().await?; + + let Some(user_email) = user_email else { + return Ok(None); + }; + + if !requester.is_owner_or_admin(&user_email) { + return Ok(None); + } + + Ok(Some(UserEmail(user_email))) + } + + /// Fetch a user recovery ticket. 
+ async fn user_recovery_ticket( + &self, + ctx: &Context<'_>, + ticket: String, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let mut repo = state.repository().await?; + let ticket = repo.user_recovery().find_ticket(&ticket).await?; + repo.cancel().await?; + + Ok(ticket.map(UserRecoveryTicket)) + } + + /// Fetch a user email authentication session + async fn user_email_authentication( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::UserEmailAuthentication.extract_ulid(&id)?; + let requester = ctx.requester(); + let mut repo = state.repository().await?; + let authentication = repo.user_email().lookup_authentication(id).await?; + let Some(authentication) = authentication else { + return Ok(None); + }; + + let Some(browser_session) = requester.browser_session() else { + return Ok(None); + }; + + if authentication.user_session_id != Some(browser_session.id) { + return Ok(None); + } + + Ok(Some(UserEmailAuthentication(authentication))) + } + + /// Fetches an object given its ID. + async fn node(&self, ctx: &Context<'_>, id: ID) -> Result, async_graphql::Error> { + // Special case for the anonymous user + if id.as_str() == "anonymous" { + return Ok(Some(Node::Anonymous(Box::new(Anonymous)))); + } + + if id.as_str() == crate::graphql::model::SITE_CONFIG_ID { + return Ok(Some(Node::SiteConfig(Box::new(SiteConfig::new( + ctx.state().site_config(), + ))))); + } + + let (node_type, _id) = NodeType::from_id(&id)?; + + let ret = match node_type { + // TODO + NodeType::Authentication | NodeType::CompatSsoLogin | NodeType::UserRecoveryTicket => { + None + } + + NodeType::UpstreamOAuth2Provider => UpstreamOAuthQuery + .upstream_oauth2_provider(ctx, id) + .await? + .map(|c| Node::UpstreamOAuth2Provider(Box::new(c))), + + NodeType::UpstreamOAuth2Link => UpstreamOAuthQuery + .upstream_oauth2_link(ctx, id) + .await? 
+ .map(|c| Node::UpstreamOAuth2Link(Box::new(c))), + + NodeType::OAuth2Client => self + .oauth2_client(ctx, id) + .await? + .map(|c| Node::OAuth2Client(Box::new(c))), + + NodeType::UserEmail => self + .user_email(ctx, id) + .await? + .map(|e| Node::UserEmail(Box::new(e))), + + NodeType::UserEmailAuthentication => self + .user_email_authentication(ctx, id) + .await? + .map(|e| Node::UserEmailAuthentication(Box::new(e))), + + NodeType::CompatSession => self + .compat_session(ctx, id) + .await? + .map(|s| Node::CompatSession(Box::new(s))), + + NodeType::OAuth2Session => self + .oauth2_session(ctx, id) + .await? + .map(|s| Node::OAuth2Session(Box::new(s))), + + NodeType::BrowserSession => self + .browser_session(ctx, id) + .await? + .map(|s| Node::BrowserSession(Box::new(s))), + + NodeType::User => UserQuery + .user(ctx, id) + .await? + .map(|u| Node::User(Box::new(u))), + }; + + Ok(ret) + } + + /// Get the current site configuration + async fn site_config(&self, ctx: &Context<'_>) -> SiteConfig { + SiteConfig::new(ctx.state().site_config()) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/query/session.rs b/matrix-authentication-service/crates/handlers/src/graphql/query/session.rs new file mode 100644 index 00000000..82ca55fd --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/query/session.rs @@ -0,0 +1,106 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::{Context, ID, Object, Union}; +use mas_data_model::Device; +use mas_storage::{ + Pagination, RepositoryAccess, + compat::{CompatSessionFilter, CompatSessionRepository}, + oauth2::OAuth2SessionFilter, +}; + +use crate::graphql::{ + UserId, + model::{CompatSession, NodeType, OAuth2Session}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct SessionQuery; + +/// A client session, either compat or OAuth 2.0 +#[derive(Union)] +enum Session { + CompatSession(Box), + OAuth2Session(Box), +} + +#[Object] +impl SessionQuery { + /// Lookup a compat or OAuth 2.0 session + async fn session( + &self, + ctx: &Context<'_>, + user_id: ID, + device_id: String, + ) -> Result, async_graphql::Error> { + let user_id = NodeType::User.extract_ulid(&user_id)?; + let requester = ctx.requester(); + if !requester.is_owner_or_admin(&UserId(user_id)) { + return Ok(None); + } + + let device = Device::from(device_id); + let state = ctx.state(); + let mut repo = state.repository().await?; + + // Lookup the user + let Some(user) = repo.user().lookup(user_id).await? else { + return Ok(None); + }; + + // First, try to find a compat session + let filter = CompatSessionFilter::new() + .for_user(&user) + .active_only() + .for_device(&device); + // We only want most recent session + let pagination = Pagination::last(1); + let compat_sessions = repo.compat_session().list(filter, pagination).await?; + + if compat_sessions.has_previous_page { + // XXX: should we bail out? + tracing::warn!( + "Found more than one active session with device {device} for user {user_id}" + ); + } + + if let Some(edge) = compat_sessions.edges.into_iter().next() { + let (compat_session, sso_login) = edge.node; + repo.cancel().await?; + + return Ok(Some(Session::CompatSession(Box::new( + CompatSession::new(compat_session).with_loaded_sso_login(sso_login), + )))); + } + + // Then, try to find an OAuth 2.0 session. 
+ let filter = OAuth2SessionFilter::new() + .for_user(&user) + .active_only() + .for_device(&device); + let sessions = repo.oauth2_session().list(filter, pagination).await?; + + // It's possible to have multiple active OAuth 2.0 sessions. For now, we just + // log it if it is the case + if sessions.has_previous_page { + // XXX: should we bail out? + tracing::warn!( + "Found more than one active session with device {device} for user {user_id}" + ); + } + + if let Some(edge) = sessions.edges.into_iter().next() { + repo.cancel().await?; + return Ok(Some(Session::OAuth2Session(Box::new(OAuth2Session( + edge.node, + ))))); + } + repo.cancel().await?; + + Ok(None) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/query/upstream_oauth.rs b/matrix-authentication-service/crates/handlers/src/graphql/query/upstream_oauth.rs new file mode 100644 index 00000000..f0b4ceee --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/query/upstream_oauth.rs @@ -0,0 +1,145 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::{ + Context, ID, Object, + connection::{Connection, Edge, OpaqueCursor, query}, +}; +use mas_storage::{Pagination, RepositoryAccess, upstream_oauth2::UpstreamOAuthProviderFilter}; + +use crate::graphql::{ + model::{ + Cursor, NodeCursor, NodeType, PreloadedTotalCount, UpstreamOAuth2Link, + UpstreamOAuth2Provider, + }, + state::ContextExt, +}; + +#[derive(Default)] +pub struct UpstreamOAuthQuery; + +#[Object] +impl UpstreamOAuthQuery { + /// Fetch an upstream OAuth 2.0 link by its ID. 
+ pub async fn upstream_oauth2_link( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::UpstreamOAuth2Link.extract_ulid(&id)?; + let requester = ctx.requester(); + + let mut repo = state.repository().await?; + let link = repo.upstream_oauth_link().lookup(id).await?; + repo.cancel().await?; + + let Some(link) = link else { + return Ok(None); + }; + + if !requester.is_owner_or_admin(&link) { + return Ok(None); + } + + Ok(Some(UpstreamOAuth2Link::new(link))) + } + + /// Fetch an upstream OAuth 2.0 provider by its ID. + pub async fn upstream_oauth2_provider( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let state = ctx.state(); + let id = NodeType::UpstreamOAuth2Provider.extract_ulid(&id)?; + + let mut repo = state.repository().await?; + let provider = repo.upstream_oauth_provider().lookup(id).await?; + repo.cancel().await?; + + let Some(provider) = provider else { + return Ok(None); + }; + + // We only allow enabled providers to be fetched + if !provider.enabled() { + return Ok(None); + } + + Ok(Some(UpstreamOAuth2Provider::new(provider))) + } + + /// Get a list of upstream OAuth 2.0 providers. 
+ async fn upstream_oauth2_providers( + &self, + ctx: &Context<'_>, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> + { + let state = ctx.state(); + let mut repo = state.repository().await?; + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| { + x.extract_for_type(NodeType::UpstreamOAuth2Provider) + }) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| { + x.extract_for_type(NodeType::UpstreamOAuth2Provider) + }) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + // We only want enabled providers + // XXX: we may want to let admins see disabled providers + let filter = UpstreamOAuthProviderFilter::new().enabled_only(); + + let page = repo + .upstream_oauth_provider() + .list(filter, pagination) + .await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.upstream_oauth_provider().count(filter).await?) 
+ } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::UpstreamOAuth2Provider, edge.cursor)), + UpstreamOAuth2Provider::new(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/query/user.rs b/matrix-authentication-service/crates/handlers/src/graphql/query/user.rs new file mode 100644 index 00000000..bb55ef67 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/query/user.rs @@ -0,0 +1,168 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_graphql::{ + Context, Enum, ID, Object, + connection::{Connection, Edge, OpaqueCursor, query}, +}; +use mas_storage::{Pagination, user::UserFilter}; + +use crate::graphql::{ + UserId, + model::{Cursor, NodeCursor, NodeType, PreloadedTotalCount, User}, + state::ContextExt as _, +}; + +#[derive(Default)] +pub struct UserQuery; + +#[Object] +impl UserQuery { + /// Fetch a user by its ID. 
+ pub async fn user( + &self, + ctx: &Context<'_>, + id: ID, + ) -> Result, async_graphql::Error> { + let id = NodeType::User.extract_ulid(&id)?; + + let requester = ctx.requester(); + if !requester.is_owner_or_admin(&UserId(id)) { + return Ok(None); + } + + // We could avoid the database lookup if the requester is the user we're looking + // for but that would make the code more complex and we're not very + // concerned about performance yet + let state = ctx.state(); + let mut repo = state.repository().await?; + let user = repo.user().lookup(id).await?; + repo.cancel().await?; + + Ok(user.map(User)) + } + + /// Fetch a user by its username. + async fn user_by_username( + &self, + ctx: &Context<'_>, + username: String, + ) -> Result, async_graphql::Error> { + let requester = ctx.requester(); + let state = ctx.state(); + let mut repo = state.repository().await?; + + let user = repo.user().find_by_username(&username).await?; + let Some(user) = user else { + // We don't want to leak the existence of a user + return Ok(None); + }; + + // Users can only see themselves, except for admins + if !requester.is_owner_or_admin(&user) { + return Ok(None); + } + + Ok(Some(User(user))) + } + + /// Get a list of users. + /// + /// This is only available to administrators. 
+ async fn users( + &self, + ctx: &Context<'_>, + + #[graphql(name = "state", desc = "List only users with the given state.")] + state_param: Option, + + #[graphql( + name = "canRequestAdmin", + desc = "List only users with the given 'canRequestAdmin' value" + )] + can_request_admin_param: Option, + + #[graphql(desc = "Returns the elements in the list that come after the cursor.")] + after: Option, + #[graphql(desc = "Returns the elements in the list that come before the cursor.")] + before: Option, + #[graphql(desc = "Returns the first *n* elements from the list.")] first: Option, + #[graphql(desc = "Returns the last *n* elements from the list.")] last: Option, + ) -> Result, async_graphql::Error> { + let requester = ctx.requester(); + if !requester.is_admin() { + return Err(async_graphql::Error::new("Unauthorized")); + } + + let state = ctx.state(); + let mut repo = state.repository().await?; + + query( + after, + before, + first, + last, + async |after, before, first, last| { + let after_id = after + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::User)) + .transpose()?; + let before_id = before + .map(|x: OpaqueCursor| x.extract_for_type(NodeType::User)) + .transpose()?; + let pagination = Pagination::try_new(before_id, after_id, first, last)?; + + // Build the query filter + let filter = UserFilter::new(); + let filter = match can_request_admin_param { + Some(true) => filter.can_request_admin_only(), + Some(false) => filter.cannot_request_admin_only(), + None => filter, + }; + let filter = match state_param { + Some(UserState::Active) => filter.active_only(), + Some(UserState::Locked) => filter.locked_only(), + None => filter, + }; + + let page = repo.user().list(filter, pagination).await?; + + // Preload the total count if requested + let count = if ctx.look_ahead().field("totalCount").exists() { + Some(repo.user().count(filter).await?) 
+ } else { + None + }; + + repo.cancel().await?; + + let mut connection = Connection::with_additional_fields( + page.has_previous_page, + page.has_next_page, + PreloadedTotalCount(count), + ); + connection.edges.extend(page.edges.into_iter().map(|edge| { + Edge::new( + OpaqueCursor(NodeCursor(NodeType::User, edge.cursor)), + User(edge.node), + ) + })); + + Ok::<_, async_graphql::Error>(connection) + }, + ) + .await + } +} + +/// The state of a user. +#[derive(Enum, Copy, Clone, Eq, PartialEq)] +enum UserState { + /// The user is active. + Active, + + /// The user is locked. + Locked, +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/query/viewer.rs b/matrix-authentication-service/crates/handlers/src/graphql/query/viewer.rs new file mode 100644 index 00000000..defcb557 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/query/viewer.rs @@ -0,0 +1,44 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::{Context, Object}; + +use crate::graphql::{ + model::{Viewer, ViewerSession}, + state::ContextExt, +}; + +#[derive(Default)] +pub struct ViewerQuery; + +#[Object] +impl ViewerQuery { + /// Get the viewer + async fn viewer(&self, ctx: &Context<'_>) -> Viewer { + let requester = ctx.requester(); + + if let Some(user) = requester.user() { + return Viewer::user(user.clone()); + } + + Viewer::anonymous() + } + + /// Get the viewer's session + async fn viewer_session(&self, ctx: &Context<'_>) -> ViewerSession { + let requester = ctx.requester(); + + if let Some(session) = requester.browser_session() { + return ViewerSession::browser_session(session.clone()); + } + + if let Some(session) = requester.oauth2_session() { + return ViewerSession::oauth2_session(session.clone()); + } + + ViewerSession::anonymous() + } +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/state.rs b/matrix-authentication-service/crates/handlers/src/graphql/state.rs new file mode 100644 index 00000000..7faf7633 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/state.rs @@ -0,0 +1,74 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_graphql::{Response, ServerError}; +use mas_data_model::{BoxClock, BoxRng, SiteConfig}; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::UrlBuilder; +use mas_storage::{BoxRepository, RepositoryError}; + +use crate::{Limiter, graphql::Requester, passwords::PasswordManager}; + +const CLEAR_SESSION_SENTINEL: &str = "__CLEAR_SESSION__"; + +#[async_trait::async_trait] +pub trait State { + async fn repository(&self) -> Result; + async fn policy(&self) -> Result; + fn password_manager(&self) -> PasswordManager; + fn homeserver_connection(&self) -> &dyn HomeserverConnection; + fn clock(&self) -> BoxClock; + fn rng(&self) -> BoxRng; + fn site_config(&self) -> &SiteConfig; + fn url_builder(&self) -> &UrlBuilder; + fn limiter(&self) -> &Limiter; +} + +pub type BoxState = Box; + +pub trait ContextExt { + fn state(&self) -> &BoxState; + + fn mark_session_ended(&self); + + fn requester(&self) -> &Requester; +} + +impl ContextExt for async_graphql::Context<'_> { + fn state(&self) -> &BoxState { + self.data_unchecked() + } + + fn mark_session_ended(&self) { + // Add a sentinel to the error context, so that we can know that we need to + // clear the session + // XXX: this is a bit of a hack, but the only sane way to get infos from within + // a mutation up to the HTTP handler + self.add_error(ServerError::new(CLEAR_SESSION_SENTINEL, None)); + } + + fn requester(&self) -> &Requester { + self.data_unchecked() + } +} + +/// Returns true if the response contains a sentinel error indicating that the +/// current cookie session has ended, and the session cookie should be cleared. +/// +/// Also removes the sentinel error from the response. 
+pub fn has_session_ended(response: &mut Response) -> bool { + let errors = std::mem::take(&mut response.errors); + let mut must_clear_session = false; + for error in errors { + if error.message == CLEAR_SESSION_SENTINEL { + must_clear_session = true; + } else { + response.errors.push(error); + } + } + must_clear_session +} diff --git a/matrix-authentication-service/crates/handlers/src/graphql/tests.rs b/matrix-authentication-service/crates/handlers/src/graphql/tests.rs new file mode 100644 index 00000000..888d477d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/graphql/tests.rs @@ -0,0 +1,1080 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::http::Request; +use hyper::StatusCode; +use mas_axum_utils::SessionInfoExt; +use mas_data_model::{AccessToken, Client, TokenType, User}; +use mas_matrix::{HomeserverConnection, ProvisionRequest}; +use mas_router::SimpleRoute; +use mas_storage::{ + RepositoryAccess, + oauth2::{OAuth2AccessTokenRepository, OAuth2ClientRepository}, +}; +use oauth2_types::{ + registration::ClientRegistrationResponse, + requests::AccessTokenResponse, + scope::{OPENID, Scope, ScopeToken}, +}; +use sqlx::PgPool; +use zeroize::Zeroizing; + +use crate::test_utils::{self, CookieHelper, RequestBuilderExt, ResponseExt, TestState, setup}; + +async fn create_test_client(state: &TestState) -> Client { + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + + let client = repo + .oauth2_client() + .add( + &mut rng, + &state.clock, + vec![], + None, + None, + None, + vec![], + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + client +} + +async fn create_test_user + Send>(state: &TestState, 
username: U) -> User { + let username = username.into(); + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + + let user = repo + .user() + .add(&mut rng, &state.clock, username) + .await + .unwrap(); + + repo.save().await.unwrap(); + + user +} + +async fn start_oauth_session( + state: &TestState, + client: &Client, + user: &User, + scope: Scope, +) -> AccessToken { + let mut repo = state.repository().await.unwrap(); + let mut rng = state.rng(); + + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, user, None) + .await + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &state.clock, client, &browser_session, scope) + .await + .unwrap(); + + let access_token_str = TokenType::AccessToken.generate(&mut rng); + + let access_token = repo + .oauth2_access_token() + .add(&mut rng, &state.clock, &session, access_token_str, None) + .await + .unwrap(); + + repo.save().await.unwrap(); + + access_token +} + +const GRAPHQL: ScopeToken = ScopeToken::from_static("urn:mas:graphql:*"); +const ADMIN: ScopeToken = ScopeToken::from_static("urn:mas:admin"); + +#[derive(serde::Deserialize)] +struct GraphQLResponse { + #[serde(default)] + data: serde_json::Value, + #[serde(default)] + errors: Vec, +} + +/// Test that the GraphQL endpoint can be queried with a GET request. +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_get(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let request = Request::get("/graphql?query={viewer{__typename}}").empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + + assert!(response.errors.is_empty()); + assert_eq!( + response.data, + serde_json::json!({ + "viewer": { + "__typename": "Anonymous", + }, + }) + ); +} + +/// Test that the GraphQL endpoint can be queried with a POST request +/// anonymously. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_anonymous_viewer(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let req = Request::post("/graphql").json(serde_json::json!({ + "query": r" + query { + viewer { + __typename + } + } + ", + })); + + let response = state.request(req).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + + assert!(response.errors.is_empty()); + assert_eq!( + response.data, + serde_json::json!({ + "viewer": { + "__typename": "Anonymous", + }, + }) + ); +} + +/// Test that the GraphQL endpoint can be authenticated with a bearer token. +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_oauth2_viewer(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Start by creating a user, a client and a token + let client = create_test_client(&state).await; + let user = create_test_user(&state, "alice").await; + let access_token = + start_oauth_session(&state, &client, &user, Scope::from_iter([GRAPHQL])).await; + let access_token = access_token.access_token; + + let req = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + query { + viewer { + __typename + + ... on User { + id + username + } + } + } + ", + })); + + let response = state.request(req).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + + assert!(response.errors.is_empty()); + assert_eq!( + response.data, + serde_json::json!({ + "viewer": { + "__typename": "User", + "id": format!("user:{id}", id = user.id), + "username": "alice", + }, + }) + ); +} + +/// Test that the GraphQL endpoint requires the GraphQL scope. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_oauth2_no_scope(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Start by creating a user, a client and a token + let client = create_test_client(&state).await; + let user = create_test_user(&state, "alice").await; + let access_token = + start_oauth_session(&state, &client, &user, Scope::from_iter([OPENID])).await; + let access_token = access_token.access_token; + + let req = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + query { + viewer { + __typename + } + } + ", + })); + + let response = state.request(req).await; + response.assert_status(StatusCode::UNAUTHORIZED); + let response: GraphQLResponse = response.json(); + + assert_eq!( + response.errors, + vec![serde_json::json!({ + "message": "Missing urn:mas:graphql:* scope", + })] + ); + assert_eq!(response.data, serde_json::json!(null)); +} + +/// Test the admin scope on the GraphQL endpoint. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_oauth2_admin(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Start by creating a user, a client and two tokens + let client = create_test_client(&state).await; + let user = create_test_user(&state, "alice").await; + + // Regular access token + let access_token = + start_oauth_session(&state, &client, &user, Scope::from_iter([GRAPHQL])).await; + let access_token = access_token.access_token; + + // Admin access token + let access_token_admin = + start_oauth_session(&state, &client, &user, Scope::from_iter([GRAPHQL, ADMIN])).await; + let access_token_admin = access_token_admin.access_token; + + // Create a second user and try to query stuff about it + let user2 = create_test_user(&state, "bob").await; + + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + query UserQuery($id: ID) { + user(id: $id) { + id + username + } + } + ", + "variables": { + "id": format!("user:{id}", id = user2.id), + }, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + + // It should not find the user, because it's not the owner and not an admin + assert!(response.errors.is_empty()); + assert_eq!( + response.data, + serde_json::json!({ + "user": null, + }) + ); + + // Do the same request with the admin token + let request = Request::post("/graphql") + .bearer(&access_token_admin) + .json(serde_json::json!({ + "query": r" + query UserQuery($id: ID) { + user(id: $id) { + id + username + } + } + ", + "variables": { + "id": format!("user:{id}", id = user2.id), + }, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + + // It should find the user, because the token has the admin scope + assert!(response.errors.is_empty()); + assert_eq!( 
+ response.data, + serde_json::json!({ + "user": { + "id": format!("user:{id}", id = user2.id), + "username": "bob", + }, + }) + ); +} + +/// Test that we can query the GraphQL endpoint with a token from a +/// `client_credentials` grant. +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_oauth2_client_credentials(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["client_credentials"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + let client_secret = response.client_secret.expect("to have a client secret"); + + // Call the token endpoint with the graphql scope + let request = Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:graphql:*", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let AccessTokenResponse { access_token, .. 
} = response.json(); + + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + query { + viewer { + __typename + } + + viewerSession { + __typename + } + } + ", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty()); + assert_eq!( + response.data, + serde_json::json!({ + "viewer": { + // There is no user associated with the client credentials grant + "__typename": "Anonymous", + }, + "viewerSession": { + // But there is a session + "__typename": "Oauth2Session", + }, + }) + ); + + // We shouldn't be able to call the addUser mutation + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "alice"}) { + user { + id + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + // There should be an error + assert_eq!(response.errors.len(), 1); + assert!(response.data.is_null()); + + // Check that we can't do a query once the token is revoked + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": access_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + // Do the same request again + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + query { + viewer { + __typename + } + + viewerSession { + __typename + } + } + ", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::UNAUTHORIZED); + + // Now make the client admin and try again + let state = { + let mut state = state; + state.policy_factory = test_utils::policy_factory( + 
"example.com", + serde_json::json!({ + "admin_clients": [client_id], + }), + ) + .await + .unwrap(); + state + }; + + // Ask for a token again, with the admin scope + let request = Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:graphql:* urn:mas:admin", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let AccessTokenResponse { access_token, .. } = response.json(); + + // We should now be able to call the addUser mutation + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "alice"}) { + user { + id + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + let user_id = response.data["addUser"]["user"]["id"].as_str().unwrap(); + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "user": { + "id": user_id, + "username": "alice" + } + } + }) + ); + + // XXX: we don't run the task worker here, so even though the addUser mutation + // should have scheduled a job to provision the user, it won't run in the test, + // so we need to do it manually + state + .homeserver_connection + .provision_user(&ProvisionRequest::new("alice", user_id)) + .await + .unwrap(); + + // We should now be able to create an arbitrary access token for the user + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r" + mutation CreateSession($userId: String!, $scope: String!) 
{ + createOauth2Session(input: {userId: $userId, permanent: true, scope: $scope}) { + accessToken + refreshToken + } + } + ", + "variables": { + "userId": user_id, + "scope": "urn:matrix:org.matrix.msc2967.client:device:AABBCCDDEE urn:matrix:org.matrix.msc2967.client:api:* urn:synapse:admin:*" + }, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + assert!(response.data["createOauth2Session"]["refreshToken"].is_null()); + assert!(response.data["createOauth2Session"]["accessToken"].is_string()); + + let token = response.data["createOauth2Session"]["accessToken"] + .as_str() + .unwrap(); + + // We should find this token in the database + let mut repo = state.repository().await.unwrap(); + let token = repo + .oauth2_access_token() + .find_by_token(token) + .await + .unwrap(); + assert!(token.is_some()); +} + +/// Test the addUser mutation +#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_add_user(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["client_credentials"], + })); + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + let client_secret = response.client_secret.expect("to have a client secret"); + + // Make the client admin + let state = { + let mut state = state; + state.policy_factory = test_utils::policy_factory( + "example.com", + serde_json::json!({ + "admin_clients": [client_id], + }), + ) + .await + .unwrap(); + state + }; + + // Ask for a token with the admin 
scope + let request = Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:graphql:* urn:mas:admin", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let AccessTokenResponse { access_token, .. } = response.json(); + + // We should now be able to call the addUser mutation + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "alice"}) { + status + user { + id + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + let user_id = &response.data["addUser"]["user"]["id"]; + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "status": "ADDED", + "user": { + "id": user_id, + "username": "alice" + } + } + }) + ); + + // If we add again, it should return the user with a status of "EXISTS" + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "alice"}) { + status + user { + id + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "status": "EXISTS", + "user": { + "id": user_id, + "username": "alice" + } + } + }) + ); + + // Reserve a username on the homeserver and try to add it + state.homeserver_connection.reserve_localpart("bob").await; + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + 
"query": r#" + mutation { + addUser(input: {username: "bob"}) { + status + user { + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "status": "RESERVED", + "user": null, + } + }) + ); + + // But we can force it with the skipHomeserverCheck flag + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "bob", skipHomeserverCheck: true}) { + status + user { + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "status": "ADDED", + "user": { + "username": "bob", + }, + } + }) + ); + + // This mutation shouldn't accept an invalid username + let request = Request::post("/graphql") + .bearer(&access_token) + .json(serde_json::json!({ + "query": r#" + mutation { + addUser(input: {username: "this is invalid"}) { + status + user { + username + } + } + } + "#, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + + assert_eq!( + response.data, + serde_json::json!({ + "addUser": { + "status": "INVALID", + "user": null, + } + }) + ); +} + +/// Test the setPassword mutation where the current password provided is +/// wrong. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_set_password_rejected_wrong_password(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let password = Zeroizing::new("current.password.123".to_owned()); + let (version, hashed_password) = state + .password_manager + .hash(&mut rng, password) + .await + .unwrap(); + + repo.user_password() + .add( + &mut rng, + &state.clock, + &user, + version, + hashed_password, + None, + ) + .await + .unwrap(); + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let cookie_jar = cookie_jar.set_session(&browser_session); + + let user_id = user.id; + + let request = Request::post("/graphql").json(serde_json::json!({ + "query": format!(r#" + mutation {{ + setPassword(input: {{ + userId: "user:{user_id}", + currentPassword: "wrong.password.123", + newPassword: "new.password.123" + }}) {{ + status + }} + }} + "#), + })); + + let cookies = CookieHelper::new(); + cookies.import(cookie_jar); + let request = cookies.with_cookies(request); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + assert_eq!( + response.data["setPassword"]["status"].as_str(), + Some("WRONG_PASSWORD"), + "{:?}", + response.data + ); +} + +/// Test the startEmailAuthentication mutation where the current password +/// provided is invalid. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_start_email_authentication_rejected_wrong_password(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let password = Zeroizing::new("current.password.123".to_owned()); + let (version, hashed_password) = state + .password_manager + .hash(&mut rng, password) + .await + .unwrap(); + + repo.user_password() + .add( + &mut rng, + &state.clock, + &user, + version, + hashed_password, + None, + ) + .await + .unwrap(); + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let cookie_jar = cookie_jar.set_session(&browser_session); + + let request = Request::post("/graphql").json(serde_json::json!({ + "query": r#" + mutation { + startEmailAuthentication(input: { + email: "alice@example.org", + password: "wrong.password.123" + }) { + status + } + } + "#, + })); + + let cookies = CookieHelper::new(); + cookies.import(cookie_jar); + let request = cookies.with_cookies(request); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + assert_eq!( + response.data["startEmailAuthentication"]["status"].as_str(), + Some("INCORRECT_PASSWORD"), + "{:?}", + response.data + ); +} + +/// Test the removeEmail mutation where the current password +/// provided is invalid. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_remove_email_rejected_wrong_password(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let password = Zeroizing::new("current.password.123".to_owned()); + let (version, hashed_password) = state + .password_manager + .hash(&mut rng, password) + .await + .unwrap(); + + repo.user_password() + .add( + &mut rng, + &state.clock, + &user, + version, + hashed_password, + None, + ) + .await + .unwrap(); + let user_email_id = repo + .user_email() + .add( + &mut rng, + &state.clock, + &user, + "alice@example.org".to_owned(), + ) + .await + .unwrap() + .id; + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let cookie_jar = cookie_jar.set_session(&browser_session); + + let request = Request::post("/graphql").json(serde_json::json!({ + "query": format!(r#" + mutation {{ + removeEmail(input: {{ + userEmailId: "user_email:{user_email_id}", + password: "wrong.password.123" + }}) {{ + status + }} + }} + "#), + })); + + let cookies = CookieHelper::new(); + cookies.import(cookie_jar); + let request = cookies.with_cookies(request); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + assert_eq!( + response.data["removeEmail"]["status"].as_str(), + Some("INCORRECT_PASSWORD"), + "{:?}", + response.data + ); +} + +/// Test the deactivateUser mutation where the current password +/// provided is invalid. 
+#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] +async fn test_deactivate_user_rejected_wrong_password(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, "alice".to_owned()) + .await + .unwrap(); + let password = Zeroizing::new("current.password.123".to_owned()); + let (version, hashed_password) = state + .password_manager + .hash(&mut rng, password) + .await + .unwrap(); + + repo.user_password() + .add( + &mut rng, + &state.clock, + &user, + version, + hashed_password, + None, + ) + .await + .unwrap(); + let browser_session = repo + .browser_session() + .add(&mut rng, &state.clock, &user, None) + .await + .unwrap(); + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let cookie_jar = cookie_jar.set_session(&browser_session); + + let request = Request::post("/graphql").json(serde_json::json!({ + "query": r#" + mutation { + deactivateUser(input: { + hsErase: true, + password: "wrong.password.123" + }) { + status + } + } + "#, + })); + + let cookies = CookieHelper::new(); + cookies.import(cookie_jar); + let request = cookies.with_cookies(request); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: GraphQLResponse = response.json(); + assert!(response.errors.is_empty(), "{:?}", response.errors); + assert_eq!( + response.data["deactivateUser"]["status"].as_str(), + Some("INCORRECT_PASSWORD"), + "{:?}", + response.data + ); +} diff --git a/matrix-authentication-service/crates/handlers/src/health.rs b/matrix-authentication-service/crates/handlers/src/health.rs new file mode 100644 index 00000000..916df60d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/health.rs @@ -0,0 +1,41 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{extract::State, response::IntoResponse}; +use mas_axum_utils::InternalError; +use sqlx::PgPool; +use tracing::{Instrument, info_span}; + +pub async fn get(State(pool): State) -> Result { + let mut conn = pool.acquire().await?; + + sqlx::query("SELECT $1") + .bind(1_i64) + .execute(&mut *conn) + .instrument(info_span!("DB health")) + .await?; + + Ok("ok") +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + + use super::*; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_get_health(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let request = Request::get("/health").empty(); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + assert_eq!(response.body(), "ok"); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/lib.rs b/matrix-authentication-service/crates/handlers/src/lib.rs new file mode 100644 index 00000000..0cb450f5 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/lib.rs @@ -0,0 +1,512 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +#![deny(clippy::future_not_send)] +#![allow( + // Some axum handlers need that + clippy::unused_async, + // Because of how axum handlers work, we sometime have take many arguments + clippy::too_many_arguments, + // Code generated by tracing::instrument trigger this when returning an `impl Trait` + // See https://github.com/tokio-rs/tracing/issues/2613 + clippy::let_with_type_underscore, +)] + +use std::{ + convert::Infallible, + sync::{Arc, LazyLock}, + time::Duration, +}; + +use axum::{ + Extension, Router, + extract::{FromRef, FromRequestParts, OriginalUri, RawQuery, State}, + http::Method, + response::{Html, IntoResponse}, + routing::{get, post}, +}; +use headers::HeaderName; +use hyper::{ + StatusCode, Version, + header::{ + ACCEPT, ACCEPT_LANGUAGE, AUTHORIZATION, CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_TYPE, + }, +}; +use mas_axum_utils::{InternalError, cookies::CookieJar}; +use mas_data_model::SiteConfig; +use mas_http::CorsLayerExt; +use mas_keystore::{Encrypter, Keystore}; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::{Route, UrlBuilder}; +use mas_storage::{BoxRepository, BoxRepositoryFactory}; +use mas_templates::{ErrorContext, NotFoundContext, TemplateContext, Templates}; +use opentelemetry::metrics::Meter; +use sqlx::PgPool; +use tower::util::AndThenLayer; +use tower_http::cors::{Any, CorsLayer}; + +use self::{graphql::ExtraRouterParameters, passwords::PasswordManager}; + +mod admin; +mod compat; +mod graphql; +mod health; +mod oauth2; +pub mod passwords; +pub mod upstream_oauth2; +mod views; + +mod activity_tracker; +mod captcha; +#[cfg(test)] +mod cleanup_tests; +mod preferred_language; +mod rate_limit; +mod session; +#[cfg(test)] +mod test_utils; + +static METER: LazyLock = LazyLock::new(|| { + let scope = opentelemetry::InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(opentelemetry_semantic_conventions::SCHEMA_URL) + .build(); + + 
opentelemetry::global::meter_with_scope(scope) +}); + +/// Implement `From` for `RouteError`, for "internal server error" kind of +/// errors. +#[macro_export] +macro_rules! impl_from_error_for_route { + ($route_error:ty : $error:ty) => { + impl From<$error> for $route_error { + fn from(e: $error) -> Self { + Self::Internal(Box::new(e)) + } + } + }; + ($error:ty) => { + impl_from_error_for_route!(self::RouteError: $error); + }; +} + +pub use mas_axum_utils::{ErrorWrapper, cookies::CookieManager}; +use mas_data_model::{BoxClock, BoxRng}; + +pub use self::{ + activity_tracker::{ActivityTracker, Bound as BoundActivityTracker}, + admin::router as admin_api_router, + graphql::{ + Schema as GraphQLSchema, schema as graphql_schema, schema_builder as graphql_schema_builder, + }, + preferred_language::PreferredLanguage, + rate_limit::{Limiter, RequesterFingerprint}, + upstream_oauth2::cache::MetadataCache, +}; + +pub fn healthcheck_router() -> Router +where + S: Clone + Send + Sync + 'static, + PgPool: FromRef, +{ + Router::new().route(mas_router::Healthcheck::route(), get(self::health::get)) +} + +pub fn graphql_router(playground: bool, undocumented_oauth2_access: bool) -> Router +where + S: Clone + Send + Sync + 'static, + graphql::Schema: FromRef, + BoundActivityTracker: FromRequestParts, + BoxRepository: FromRequestParts, + BoxClock: FromRequestParts, + Encrypter: FromRef, + CookieJar: FromRequestParts, + Limiter: FromRef, + RequesterFingerprint: FromRequestParts, +{ + let mut router = Router::new() + .route( + mas_router::GraphQL::route(), + get(self::graphql::get).post(self::graphql::post), + ) + // Pass the undocumented_oauth2_access parameter through the request extension, as it is + // per-listener + .layer(Extension(ExtraRouterParameters { + undocumented_oauth2_access, + })) + .layer( + CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_otel_headers([ + AUTHORIZATION, + ACCEPT, + ACCEPT_LANGUAGE, + CONTENT_LANGUAGE, + CONTENT_TYPE, + ]), + ); + + 
if playground { + router = router.route( + mas_router::GraphQLPlayground::route(), + get(self::graphql::playground), + ); + } + + router +} + +pub fn discovery_router() -> Router +where + S: Clone + Send + Sync + 'static, + Keystore: FromRef, + SiteConfig: FromRef, + UrlBuilder: FromRef, + BoxClock: FromRequestParts, + BoxRng: FromRequestParts, +{ + Router::new() + .route( + mas_router::OidcConfiguration::route(), + get(self::oauth2::discovery::get), + ) + .route( + mas_router::Webfinger::route(), + get(self::oauth2::webfinger::get), + ) + .layer( + CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_otel_headers([ + AUTHORIZATION, + ACCEPT, + ACCEPT_LANGUAGE, + CONTENT_LANGUAGE, + CONTENT_TYPE, + ]) + .max_age(Duration::from_secs(60 * 60)), + ) +} + +pub fn api_router() -> Router +where + S: Clone + Send + Sync + 'static, + Keystore: FromRef, + UrlBuilder: FromRef, + BoxRepository: FromRequestParts, + ActivityTracker: FromRequestParts, + BoundActivityTracker: FromRequestParts, + Encrypter: FromRef, + reqwest::Client: FromRef, + SiteConfig: FromRef, + Templates: FromRef, + Arc: FromRef, + BoxClock: FromRequestParts, + BoxRng: FromRequestParts, + Policy: FromRequestParts, +{ + // All those routes are API-like, with a common CORS layer + Router::new() + .route( + mas_router::OAuth2Keys::route(), + get(self::oauth2::keys::get), + ) + .route( + mas_router::OidcUserinfo::route(), + get(self::oauth2::userinfo::get).post(self::oauth2::userinfo::get), + ) + .route( + mas_router::OAuth2Introspection::route(), + post(self::oauth2::introspection::post), + ) + .route( + mas_router::OAuth2Revocation::route(), + post(self::oauth2::revoke::post), + ) + .route( + mas_router::OAuth2TokenEndpoint::route(), + post(self::oauth2::token::post), + ) + .route( + mas_router::OAuth2RegistrationEndpoint::route(), + post(self::oauth2::registration::post), + ) + .route( + mas_router::OAuth2DeviceAuthorizationEndpoint::route(), + post(self::oauth2::device::authorize::post), + ) 
+ .layer( + CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_otel_headers([ + AUTHORIZATION, + ACCEPT, + ACCEPT_LANGUAGE, + CONTENT_LANGUAGE, + CONTENT_TYPE, + // Swagger will send this header, so we have to allow it to avoid CORS errors + HeaderName::from_static("x-requested-with"), + ]) + .max_age(Duration::from_secs(60 * 60)), + ) +} + +#[allow(clippy::trait_duplication_in_bounds)] +pub fn compat_router(templates: Templates) -> Router +where + S: Clone + Send + Sync + 'static, + UrlBuilder: FromRef, + SiteConfig: FromRef, + Arc: FromRef, + PasswordManager: FromRef, + Limiter: FromRef, + BoxRepositoryFactory: FromRef, + BoundActivityTracker: FromRequestParts, + RequesterFingerprint: FromRequestParts, + BoxRepository: FromRequestParts, + BoxClock: FromRequestParts, + BoxRng: FromRequestParts, + Policy: FromRequestParts, +{ + // A sub-router for human-facing routes with error handling + let human_router = Router::new() + .route( + mas_router::CompatLoginSsoRedirect::route(), + get(self::compat::login_sso_redirect::get), + ) + .route( + mas_router::CompatLoginSsoRedirectIdp::route(), + get(self::compat::login_sso_redirect::get), + ) + .route( + mas_router::CompatLoginSsoRedirectSlash::route(), + get(self::compat::login_sso_redirect::get), + ) + .layer(AndThenLayer::new( + async move |response: axum::response::Response| { + Ok::<_, Infallible>(recover_error(&templates, response)) + }, + )); + + // A sub-router for API-facing routes with CORS + let api_router = Router::new() + .route( + mas_router::CompatLogin::route(), + get(self::compat::login::get).post(self::compat::login::post), + ) + .route( + mas_router::CompatLogout::route(), + post(self::compat::logout::post), + ) + .route( + mas_router::CompatLogoutAll::route(), + post(self::compat::logout_all::post), + ) + .route( + mas_router::CompatRefresh::route(), + post(self::compat::refresh::post), + ) + .layer( + CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_otel_headers([ + 
AUTHORIZATION, + ACCEPT, + ACCEPT_LANGUAGE, + CONTENT_LANGUAGE, + CONTENT_TYPE, + HeaderName::from_static("x-requested-with"), + ]) + .max_age(Duration::from_secs(60 * 60)), + ); + + Router::new().merge(human_router).merge(api_router) +} + +pub fn human_router(templates: Templates) -> Router +where + S: Clone + Send + Sync + 'static, + UrlBuilder: FromRef, + PreferredLanguage: FromRequestParts, + BoxRepository: FromRequestParts, + CookieJar: FromRequestParts, + BoundActivityTracker: FromRequestParts, + RequesterFingerprint: FromRequestParts, + Encrypter: FromRef, + Templates: FromRef, + Keystore: FromRef, + PasswordManager: FromRef, + MetadataCache: FromRef, + SiteConfig: FromRef, + Limiter: FromRef, + reqwest::Client: FromRef, + Arc: FromRef, + BoxClock: FromRequestParts, + BoxRng: FromRequestParts, + Policy: FromRequestParts, +{ + Router::new() + // XXX: hard-coded redirect from /account to /account/ + .route( + "/account", + get( + async |State(url_builder): State, RawQuery(query): RawQuery| { + let prefix = url_builder.prefix().unwrap_or_default(); + let route = mas_router::Account::route(); + let destination = if let Some(query) = query { + format!("{prefix}{route}?{query}") + } else { + format!("{prefix}{route}") + }; + + axum::response::Redirect::to(&destination) + }, + ), + ) + .route(mas_router::Account::route(), get(self::views::app::get)) + .route( + mas_router::AccountWildcard::route(), + get(self::views::app::get), + ) + .route( + mas_router::AccountRecoveryFinish::route(), + get(self::views::app::get_anonymous), + ) + .route( + mas_router::ChangePasswordDiscovery::route(), + get(async |State(url_builder): State| { + url_builder.redirect(&mas_router::AccountPasswordChange) + }), + ) + .route(mas_router::Index::route(), get(self::views::index::get)) + .route( + mas_router::Login::route(), + get(self::views::login::get).post(self::views::login::post), + ) + .route(mas_router::Logout::route(), post(self::views::logout::post)) + .route( + 
mas_router::Register::route(), + get(self::views::register::get), + ) + .route( + mas_router::PasswordRegister::route(), + get(self::views::register::password::get).post(self::views::register::password::post), + ) + .route( + mas_router::RegisterVerifyEmail::route(), + get(self::views::register::steps::verify_email::get) + .post(self::views::register::steps::verify_email::post), + ) + .route( + mas_router::RegisterToken::route(), + get(self::views::register::steps::registration_token::get) + .post(self::views::register::steps::registration_token::post), + ) + .route( + mas_router::RegisterDisplayName::route(), + get(self::views::register::steps::display_name::get) + .post(self::views::register::steps::display_name::post), + ) + .route( + mas_router::RegisterFinish::route(), + get(self::views::register::steps::finish::get), + ) + .route( + mas_router::AccountRecoveryStart::route(), + get(self::views::recovery::start::get).post(self::views::recovery::start::post), + ) + .route( + mas_router::AccountRecoveryProgress::route(), + get(self::views::recovery::progress::get).post(self::views::recovery::progress::post), + ) + .route( + mas_router::OAuth2AuthorizationEndpoint::route(), + get(self::oauth2::authorization::get), + ) + .route( + mas_router::Consent::route(), + get(self::oauth2::authorization::consent::get) + .post(self::oauth2::authorization::consent::post), + ) + .route( + mas_router::CompatLoginSsoComplete::route(), + get(self::compat::login_sso_complete::get).post(self::compat::login_sso_complete::post), + ) + .route( + mas_router::UpstreamOAuth2Authorize::route(), + get(self::upstream_oauth2::authorize::get), + ) + .route( + mas_router::UpstreamOAuth2Callback::route(), + get(self::upstream_oauth2::callback::handler) + .post(self::upstream_oauth2::callback::handler), + ) + .route( + mas_router::UpstreamOAuth2Link::route(), + get(self::upstream_oauth2::link::get).post(self::upstream_oauth2::link::post), + ) + .route( + 
mas_router::UpstreamOAuth2BackchannelLogout::route(), + post(self::upstream_oauth2::backchannel_logout::post), + ) + .route( + mas_router::DeviceCodeLink::route(), + get(self::oauth2::device::link::get), + ) + .route( + mas_router::DeviceCodeConsent::route(), + get(self::oauth2::device::consent::get).post(self::oauth2::device::consent::post), + ) + .layer(AndThenLayer::new( + async move |response: axum::response::Response| { + Ok::<_, Infallible>(recover_error(&templates, response)) + }, + )) +} + +fn recover_error( + templates: &Templates, + response: axum::response::Response, +) -> axum::response::Response { + // Error responses should have an ErrorContext attached to them + let ext = response.extensions().get::(); + if let Some(ctx) = ext + && let Ok(res) = templates.render_error(ctx) + { + let (mut parts, _original_body) = response.into_parts(); + parts.headers.remove(CONTENT_TYPE); + parts.headers.remove(CONTENT_LENGTH); + return (parts, Html(res)).into_response(); + } + + response +} + +/// The fallback handler for all routes that don't match anything else. +/// +/// # Errors +/// +/// Returns an error if the template rendering fails. +pub async fn fallback( + State(templates): State, + OriginalUri(uri): OriginalUri, + method: Method, + version: Version, + PreferredLanguage(locale): PreferredLanguage, +) -> Result { + let ctx = NotFoundContext::new(&method, version, &uri).with_language(locale); + // XXX: this should look at the Accept header and return JSON if requested + + let res = templates.render_not_found(&ctx)?; + + Ok((StatusCode::NOT_FOUND, Html(res))) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/authorization/callback.rs b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/callback.rs new file mode 100644 index 00000000..01f59d60 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/callback.rs @@ -0,0 +1,166 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::module_name_repetitions)] + +use std::collections::HashMap; + +use axum::response::{Html, IntoResponse, Redirect, Response}; +use mas_data_model::AuthorizationGrant; +use mas_i18n::DataLocale; +use mas_templates::{FormPostContext, Templates}; +use oauth2_types::requests::ResponseMode; +use serde::Serialize; +use thiserror::Error; +use url::Url; + +#[derive(Debug, Clone)] +enum CallbackDestinationMode { + Query { + existing_params: HashMap, + }, + Fragment, + FormPost, +} + +#[derive(Debug, Clone)] +pub struct CallbackDestination { + mode: CallbackDestinationMode, + safe_redirect_uri: Url, + state: Option, +} + +#[derive(Debug, Error)] +pub enum IntoCallbackDestinationError { + #[error("Redirect URI can't have a fragment")] + RedirectUriFragmentNotAllowed, + + #[error("Existing query parameters are not valid")] + RedirectUriInvalidQueryParams(#[from] serde_urlencoded::de::Error), + + #[error("Requested response_mode is not supported")] + UnsupportedResponseMode, +} + +#[derive(Debug, Error)] +pub enum CallbackDestinationError { + #[error("Failed to render the form_post template")] + FormPostRender(#[from] mas_templates::TemplateError), + + #[error("Failed to serialize parameters query string")] + ParamsSerialization(#[from] serde_urlencoded::ser::Error), +} + +impl TryFrom<&AuthorizationGrant> for CallbackDestination { + type Error = IntoCallbackDestinationError; + + fn try_from(value: &AuthorizationGrant) -> Result { + Self::try_new( + &value.response_mode, + value.redirect_uri.clone(), + value.state.clone(), + ) + } +} + +impl CallbackDestination { + pub fn try_new( + mode: &ResponseMode, + mut redirect_uri: Url, + state: Option, + ) -> Result { + if redirect_uri.fragment().is_some() { + return 
Err(IntoCallbackDestinationError::RedirectUriFragmentNotAllowed); + } + + let mode = match mode { + ResponseMode::Query => { + let existing_params = redirect_uri + .query() + .map(serde_urlencoded::from_str) + .transpose()? + .unwrap_or_default(); + + // Remove the query from the URL + redirect_uri.set_query(None); + + CallbackDestinationMode::Query { existing_params } + } + ResponseMode::Fragment => CallbackDestinationMode::Fragment, + ResponseMode::FormPost => CallbackDestinationMode::FormPost, + _ => return Err(IntoCallbackDestinationError::UnsupportedResponseMode), + }; + + Ok(Self { + mode, + safe_redirect_uri: redirect_uri, + state, + }) + } + + pub fn go( + self, + templates: &Templates, + locale: &DataLocale, + params: T, + ) -> Result { + #[derive(Serialize)] + struct AllParams<'s, T> { + #[serde(flatten, skip_serializing_if = "Option::is_none")] + existing: Option<&'s HashMap>, + + #[serde(skip_serializing_if = "Option::is_none")] + state: Option, + + #[serde(flatten)] + params: T, + } + + let mut redirect_uri = self.safe_redirect_uri; + let state = self.state; + + match self.mode { + CallbackDestinationMode::Query { existing_params } => { + let merged = AllParams { + existing: Some(&existing_params), + state, + params, + }; + + let new_qs = serde_urlencoded::to_string(merged)?; + + redirect_uri.set_query(Some(&new_qs)); + + Ok(Redirect::to(redirect_uri.as_str()).into_response()) + } + + CallbackDestinationMode::Fragment => { + let merged = AllParams { + existing: None, + state, + params, + }; + + let new_qs = serde_urlencoded::to_string(merged)?; + + redirect_uri.set_fragment(Some(&new_qs)); + + Ok(Redirect::to(redirect_uri.as_str()).into_response()) + } + + CallbackDestinationMode::FormPost => { + let merged = AllParams { + existing: None, + state, + params, + }; + let ctx = FormPostContext::new_for_url(redirect_uri, merged).with_language(locale); + let rendered = templates.render_form_post(&ctx)?; + Ok(Html(rendered).into_response()) + } + } + } +} 
diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/authorization/consent.rs b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/consent.rs new file mode 100644 index 00000000..ab51bef1 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/consent.rs @@ -0,0 +1,361 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{sync::Arc, time::Duration}; + +use axum::{ + extract::{Form, Path, State}, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::TypedHeader; +use hyper::StatusCode; +use mas_axum_utils::{ + GenericError, InternalError, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{AuthorizationGrantStage, BoxClock, BoxRng, MatrixUser}; +use mas_keystore::Keystore; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::{ + BoxRepository, + oauth2::{OAuth2AuthorizationGrantRepository, OAuth2ClientRepository}, +}; +use mas_templates::{ConsentContext, PolicyViolationContext, TemplateContext, Templates}; +use oauth2_types::requests::AuthorizationResponse; +use thiserror::Error; +use ulid::Ulid; + +use super::callback::CallbackDestination; +use crate::{ + BoundActivityTracker, PreferredLanguage, impl_from_error_for_route, + oauth2::generate_id_token, + session::{SessionOrFallback, count_user_sessions_for_limiting, load_session_or_fallback}, +}; + +#[derive(Debug, Error)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error(transparent)] + Csrf(#[from] mas_axum_utils::csrf::CsrfError), + + #[error("Authorization grant not found")] + GrantNotFound, + + #[error("Authorization grant {0} already used")] + GrantNotPending(Ulid), + + #[error("Failed to load 
client {0}")] + NoSuchClient(Ulid), +} + +impl_from_error_for_route!(mas_templates::TemplateError); +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_policy::LoadError); +impl_from_error_for_route!(mas_policy::EvaluationError); +impl_from_error_for_route!(crate::session::SessionLoadError); +impl_from_error_for_route!(crate::oauth2::IdTokenSignatureError); +impl_from_error_for_route!(super::callback::IntoCallbackDestinationError); +impl_from_error_for_route!(super::callback::CallbackDestinationError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + match self { + Self::Internal(e) => InternalError::new(e).into_response(), + e @ Self::NoSuchClient(_) => InternalError::new(Box::new(e)).into_response(), + e @ Self::GrantNotFound => GenericError::new(StatusCode::NOT_FOUND, e).into_response(), + e @ Self::GrantNotPending(_) => { + GenericError::new(StatusCode::CONFLICT, e).into_response() + } + e @ Self::Csrf(_) => GenericError::new(StatusCode::BAD_REQUEST, e).into_response(), + } + } +} + +#[tracing::instrument( + name = "handlers.oauth2.authorization.consent.get", + fields(grant.id = %grant_id), + skip_all, +)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(homeserver): State>, + mut policy: Policy, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + Path(grant_id): Path, +) -> Result { + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. 
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + let user_agent = user_agent.map(|ua| ua.to_string()); + + let grant = repo + .oauth2_authorization_grant() + .lookup(grant_id) + .await? + .ok_or(RouteError::GrantNotFound)?; + + let client = repo + .oauth2_client() + .lookup(grant.client_id) + .await? + .ok_or(RouteError::NoSuchClient(grant.client_id))?; + + if !matches!(grant.stage, AuthorizationGrantStage::Pending) { + return Err(RouteError::GrantNotPending(grant.id)); + } + + let Some(session) = maybe_session else { + let login = mas_router::Login::and_continue_grant(grant_id); + return Ok((cookie_jar, url_builder.redirect(&login)).into_response()); + }; + + activity_tracker + .record_browser_session(&clock, &session) + .await; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let session_counts = count_user_sessions_for_limiting(&mut repo, &session.user).await?; + + // We can close the repository early, we don't need it at this point + repo.save().await?; + + let res = policy + .evaluate_authorization_grant(mas_policy::AuthorizationGrantInput { + user: Some(&session.user), + client: &client, + session_counts: Some(session_counts), + scope: &grant.scope, + grant_type: mas_policy::GrantType::AuthorizationCode, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + if !res.valid() { + let ctx = PolicyViolationContext::for_authorization_grant(grant, client) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_policy_violation(&ctx)?; + + return Ok((cookie_jar, Html(content)).into_response()); + } + + // Fetch informations about the user. 
This is purely cosmetic, so we let it + // fail and put a 1s timeout to it in case we fail to query it + // XXX: we're likely to need this in other places + let localpart = &session.user.username; + let display_name = match tokio::time::timeout( + Duration::from_secs(1), + homeserver.query_user(localpart), + ) + .await + { + Ok(Ok(user)) => user.displayname, + Ok(Err(err)) => { + tracing::warn!( + error = &*err as &dyn std::error::Error, + localpart, + "Failed to query user" + ); + None + } + Err(_) => { + tracing::warn!(localpart, "Timed out while querying user"); + None + } + }; + + let matrix_user = MatrixUser { + mxid: homeserver.mxid(localpart), + display_name, + }; + + let ctx = ConsentContext::new(grant, client, matrix_user) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_consent(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} + +#[tracing::instrument( + name = "handlers.oauth2.authorization.consent.post", + fields(grant.id = %grant_id), + skip_all, +)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(key_store): State, + mut policy: Policy, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + State(url_builder): State, + Path(grant_id): Path, + Form(form): Form>, +) -> Result { + cookie_jar.verify_form(&clock, form)?; + + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. 
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let user_agent = user_agent.map(|ua| ua.to_string()); + + let grant = repo + .oauth2_authorization_grant() + .lookup(grant_id) + .await? + .ok_or(RouteError::GrantNotFound)?; + let callback_destination = CallbackDestination::try_from(&grant)?; + + let Some(browser_session) = maybe_session else { + let next = PostAuthAction::continue_grant(grant_id); + let login = mas_router::Login::and_then(next); + return Ok((cookie_jar, url_builder.redirect(&login)).into_response()); + }; + + activity_tracker + .record_browser_session(&clock, &browser_session) + .await; + + let client = repo + .oauth2_client() + .lookup(grant.client_id) + .await? + .ok_or(RouteError::NoSuchClient(grant.client_id))?; + + if !matches!(grant.stage, AuthorizationGrantStage::Pending) { + return Err(RouteError::GrantNotPending(grant.id)); + } + + let session_counts = count_user_sessions_for_limiting(&mut repo, &browser_session.user).await?; + + let res = policy + .evaluate_authorization_grant(mas_policy::AuthorizationGrantInput { + user: Some(&browser_session.user), + client: &client, + session_counts: Some(session_counts), + scope: &grant.scope, + grant_type: mas_policy::GrantType::AuthorizationCode, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + + if !res.valid() { + let ctx = PolicyViolationContext::for_authorization_grant(grant, client) + .with_session(browser_session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_policy_violation(&ctx)?; + + return Ok((cookie_jar, Html(content)).into_response()); + } + + // All good, let's start the session + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut rng, + &clock, + &client, + &browser_session, + grant.scope.clone(), + ) + .await?; 
+ + let grant = repo + .oauth2_authorization_grant() + .fulfill(&clock, &session, grant) + .await?; + + let mut params = AuthorizationResponse::default(); + + // Did they request an ID token? + if grant.response_type_id_token { + // Fetch the last authentication + let last_authentication = repo + .browser_session() + .get_last_authentication(&browser_session) + .await?; + + params.id_token = Some(generate_id_token( + &mut rng, + &clock, + &url_builder, + &key_store, + &client, + Some(&grant), + &browser_session, + None, + last_authentication.as_ref(), + )?); + } + + // Did they request an auth code? + if let Some(code) = grant.code { + params.code = Some(code.code); + } + + repo.save().await?; + + activity_tracker + .record_oauth2_session(&clock, &session) + .await; + + Ok(( + cookie_jar, + callback_destination.go(&templates, &locale, params)?, + ) + .into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/authorization/mod.rs b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/mod.rs new file mode 100644 index 00000000..6037bd9a --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/authorization/mod.rs @@ -0,0 +1,317 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{ + extract::{Form, State}, + response::{IntoResponse, Response}, +}; +use hyper::StatusCode; +use mas_axum_utils::{GenericError, InternalError, SessionInfoExt, cookies::CookieJar}; +use mas_data_model::{AuthorizationCode, BoxClock, BoxRng, Pkce}; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::{ + BoxRepository, + oauth2::{OAuth2AuthorizationGrantRepository, OAuth2ClientRepository}, +}; +use mas_templates::Templates; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + pkce, + requests::{AuthorizationRequest, GrantType, Prompt, ResponseMode}, + response_type::ResponseType, +}; +use rand::{Rng, distributions::Alphanumeric}; +use serde::Deserialize; +use thiserror::Error; + +use self::callback::CallbackDestination; +use crate::{BoundActivityTracker, PreferredLanguage, impl_from_error_for_route}; + +mod callback; +pub(crate) mod consent; + +#[derive(Debug, Error)] +pub enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("could not find client")] + ClientNotFound, + + #[error("invalid response mode")] + InvalidResponseMode, + + #[error("invalid parameters")] + IntoCallbackDestination(#[from] self::callback::IntoCallbackDestinationError), + + #[error("invalid redirect uri")] + UnknownRedirectUri(#[from] mas_data_model::InvalidRedirectUriError), +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + match self { + Self::Internal(e) => InternalError::new(e).into_response(), + e @ (Self::ClientNotFound + | Self::InvalidResponseMode + | Self::IntoCallbackDestination(_) + | Self::UnknownRedirectUri(_)) => { + GenericError::new(StatusCode::BAD_REQUEST, e).into_response() + } + } + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_templates::TemplateError); +impl_from_error_for_route!(self::callback::CallbackDestinationError); +impl_from_error_for_route!(mas_policy::LoadError); 
+impl_from_error_for_route!(mas_policy::EvaluationError); + +#[derive(Deserialize)] +pub(crate) struct Params { + #[serde(flatten)] + auth: AuthorizationRequest, + + #[serde(flatten)] + pkce: Option, +} + +/// Given a list of response types and an optional user-defined response mode, +/// figure out what response mode must be used, and emit an error if the +/// suggested response mode isn't allowed for the given response types. +fn resolve_response_mode( + response_type: &ResponseType, + suggested_response_mode: Option, +) -> Result { + use ResponseMode as M; + + // If the response type includes either "token" or "id_token", the default + // response mode is "fragment" and the response mode "query" must not be + // used + if response_type.has_token() || response_type.has_id_token() { + match suggested_response_mode { + None => Ok(M::Fragment), + Some(M::Query) => Err(RouteError::InvalidResponseMode), + Some(mode) => Ok(mode), + } + } else { + // In other cases, all response modes are allowed, defaulting to "query" + Ok(suggested_response_mode.unwrap_or(M::Query)) + } +} + +#[tracing::instrument( + name = "handlers.oauth2.authorization.get", + fields(client.id = %params.auth.client_id), + skip_all, +)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + activity_tracker: BoundActivityTracker, + mut repo: BoxRepository, + cookie_jar: CookieJar, + Form(params): Form, +) -> Result { + // First, figure out what client it is + let client = repo + .oauth2_client() + .find_by_client_id(¶ms.auth.client_id) + .await? + .ok_or(RouteError::ClientNotFound)?; + + // And resolve the redirect_uri and response_mode + let redirect_uri = client + .resolve_redirect_uri(¶ms.auth.redirect_uri)? 
+ .clone(); + let response_type = params.auth.response_type; + let response_mode = resolve_response_mode(&response_type, params.auth.response_mode)?; + + // Now we have a proper callback destination to go to on error + let callback_destination = CallbackDestination::try_new( + &response_mode, + redirect_uri.clone(), + params.auth.state.clone(), + )?; + + // Get the session info from the cookie + let (session_info, cookie_jar) = cookie_jar.session_info(); + + // One day, we will have try blocks + let res: Result = ({ + let templates = templates.clone(); + let callback_destination = callback_destination.clone(); + let locale = locale.clone(); + async move { + let maybe_session = session_info.load_active_session(&mut repo).await?; + let prompt = params.auth.prompt.as_deref().unwrap_or_default(); + + // Check if the request/request_uri/registration params are used. If so, reply + // with the right error since we don't support them. + if params.auth.request.is_some() { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::RequestNotSupported), + )?); + } + + if params.auth.request_uri.is_some() { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::RequestUriNotSupported), + )?); + } + + // Check if the client asked for a `token` response type, and bail out if it's + // the case, since we don't support them + if response_type.has_token() { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::UnsupportedResponseType), + )?); + } + + // If the client asked for a `id_token` response type, we must check if it can + // use the `implicit` grant type + if response_type.has_id_token() && !client.grant_types.contains(&GrantType::Implicit) { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::UnauthorizedClient), + )?); + } + + if params.auth.registration.is_some() { + return 
Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::RegistrationNotSupported), + )?); + } + + // Fail early if prompt=none; we never let it go through + if prompt.contains(&Prompt::None) { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::LoginRequired), + )?); + } + + let code: Option = if response_type.has_code() { + // Check if it is allowed to use this grant type + if !client.grant_types.contains(&GrantType::AuthorizationCode) { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::UnauthorizedClient), + )?); + } + + // 32 random alphanumeric characters, about 190bit of entropy + let code: String = (&mut rng) + .sample_iter(&Alphanumeric) + .take(32) + .map(char::from) + .collect(); + + let pkce = params.pkce.map(|p| Pkce { + challenge: p.code_challenge, + challenge_method: p.code_challenge_method, + }); + + Some(AuthorizationCode { code, pkce }) + } else { + // If the request had PKCE params but no code asked, it should get back with an + // error + if params.pkce.is_some() { + return Ok(callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::InvalidRequest), + )?); + } + + None + }; + + let grant = repo + .oauth2_authorization_grant() + .add( + &mut rng, + &clock, + &client, + redirect_uri.clone(), + params.auth.scope, + code, + params.auth.state.clone(), + params.auth.nonce, + response_mode, + response_type.has_id_token(), + params.auth.login_hint, + Some(locale.to_string()), + ) + .await?; + let continue_grant = PostAuthAction::continue_grant(grant.id); + + let res = match maybe_session { + None if prompt.contains(&Prompt::Create) => { + // Client asked for a registration, show the registration prompt + repo.save().await?; + + url_builder + .redirect(&mas_router::Register::and_then(continue_grant)) + .into_response() + } + + None => { + // Other cases where we don't have a session, ask for a login 
+ repo.save().await?; + + url_builder + .redirect(&mas_router::Login::and_then(continue_grant)) + .into_response() + } + + Some(user_session) => { + // TODO: better support for prompt=create when we have a session + repo.save().await?; + + activity_tracker + .record_browser_session(&clock, &user_session) + .await; + url_builder + .redirect(&mas_router::Consent(grant.id)) + .into_response() + } + }; + + Ok(res) + } + }) + .await; + + let response = match res { + Ok(r) => r, + Err(err) => { + tracing::error!(message = &err as &dyn std::error::Error); + callback_destination.go( + &templates, + &locale, + ClientError::from(ClientErrorCode::ServerError), + )? + } + }; + + Ok((cookie_jar, response).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/device/authorize.rs b/matrix-authentication-service/crates/handlers/src/oauth2/device/authorize.rs new file mode 100644 index 00000000..41513161 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/device/authorize.rs @@ -0,0 +1,227 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{Json, extract::State, response::IntoResponse}; +use axum_extra::typed_header::TypedHeader; +use chrono::Duration; +use headers::{CacheControl, Pragma}; +use hyper::StatusCode; +use mas_axum_utils::{ + client_authorization::{ClientAuthorization, CredentialsVerificationError}, + record_error, +}; +use mas_data_model::{BoxClock, BoxRng}; +use mas_keystore::Encrypter; +use mas_router::UrlBuilder; +use mas_storage::{BoxRepository, oauth2::OAuth2DeviceCodeGrantParams}; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + requests::{DeviceAuthorizationRequest, DeviceAuthorizationResponse, GrantType}, + scope::ScopeToken, +}; +use rand::distributions::{Alphanumeric, DistString}; +use thiserror::Error; +use ulid::Ulid; + +use crate::{BoundActivityTracker, impl_from_error_for_route}; + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("client not found")] + ClientNotFound, + + #[error("client {0} is not allowed to use the device code grant")] + ClientNotAllowed(Ulid), + + #[error("invalid client credentials for client {client_id}")] + InvalidClientCredentials { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, + + #[error("could not verify client credentials for client {client_id}")] + ClientCredentialsVerification { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + + let response = match self { + Self::Internal(_) | Self::ClientCredentialsVerification { .. } => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ClientError::from(ClientErrorCode::ServerError)), + ), + Self::ClientNotFound | Self::InvalidClientCredentials { .. 
} => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::InvalidClient)), + ), + Self::ClientNotAllowed(_) => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::UnauthorizedClient)), + ), + }; + + (sentry_event_id, response).into_response() + } +} + +#[tracing::instrument( + name = "handlers.oauth2.device.request.post", + fields(client.id = client_authorization.client_id()), + skip_all, +)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + user_agent: Option>, + activity_tracker: BoundActivityTracker, + State(url_builder): State, + State(http_client): State, + State(encrypter): State, + client_authorization: ClientAuthorization, +) -> Result { + let client = client_authorization + .credentials + .fetch(&mut repo) + .await? + .ok_or(RouteError::ClientNotFound)?; + + // Reuse the token endpoint auth method to verify the client + let method = client + .token_endpoint_auth_method + .as_ref() + .ok_or(RouteError::ClientNotAllowed(client.id))?; + + client_authorization + .credentials + .verify(&http_client, &encrypter, method, &client) + .await + .map_err(|err| { + if err.is_internal() { + RouteError::ClientCredentialsVerification { + client_id: client.id, + source: err, + } + } else { + RouteError::InvalidClientCredentials { + client_id: client.id, + source: err, + } + } + })?; + + if !client.grant_types.contains(&GrantType::DeviceCode) { + return Err(RouteError::ClientNotAllowed(client.id)); + } + + let scope = client_authorization + .form + .and_then(|f| f.scope) + // XXX: Is this really how we do empty scopes? 
+ .unwrap_or(std::iter::empty::().collect()); + + let expires_in = Duration::microseconds(20 * 60 * 1000 * 1000); + + let user_agent = user_agent.map(|ua| ua.as_str().to_owned()); + let ip_address = activity_tracker.ip(); + + let device_code = Alphanumeric.sample_string(&mut rng, 32); + let user_code = Alphanumeric.sample_string(&mut rng, 6).to_uppercase(); + + let device_code = repo + .oauth2_device_code_grant() + .add( + &mut rng, + &clock, + OAuth2DeviceCodeGrantParams { + client: &client, + scope, + device_code, + user_code, + expires_in, + user_agent, + ip_address, + }, + ) + .await?; + + repo.save().await?; + + let response = DeviceAuthorizationResponse { + device_code: device_code.device_code, + user_code: device_code.user_code.clone(), + verification_uri: url_builder.device_code_link(), + verification_uri_complete: Some(url_builder.device_code_link_full(device_code.user_code)), + expires_in, + interval: Some(Duration::microseconds(5 * 1000 * 1000)), + }; + + Ok(( + StatusCode::OK, + TypedHeader(CacheControl::new().with_no_store()), + TypedHeader(Pragma::no_cache()), + Json(response), + )) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_router::SimpleRoute; + use oauth2_types::{ + registration::ClientRegistrationResponse, requests::DeviceAuthorizationResponse, + }; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_device_code_request(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "none", + "grant_types": ["urn:ietf:params:oauth:grant-type:device_code"], + "response_types": [], + })); + + let response = state.request(request).await; + 
response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + + // Test the happy path: the client is allowed to use the device code grant type + let request = Request::post(mas_router::OAuth2DeviceAuthorizationEndpoint::PATH).form( + serde_json::json!({ + "client_id": client_id, + "scope": "openid", + }), + ); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let response: DeviceAuthorizationResponse = response.json(); + assert_eq!(response.device_code.len(), 32); + assert_eq!(response.user_code.len(), 6); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/device/consent.rs b/matrix-authentication-service/crates/handlers/src/oauth2/device/consent.rs new file mode 100644 index 00000000..3912d2dc --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/device/consent.rs @@ -0,0 +1,347 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{sync::Arc, time::Duration}; + +use anyhow::Context; +use axum::{ + Form, + extract::{Path, State}, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::TypedHeader; +use mas_axum_utils::{ + InternalError, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, MatrixUser}; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::UrlBuilder; +use mas_storage::BoxRepository; +use mas_templates::{DeviceConsentContext, PolicyViolationContext, TemplateContext, Templates}; +use serde::Deserialize; +use tracing::warn; +use ulid::Ulid; + +use crate::{ + BoundActivityTracker, PreferredLanguage, + session::{SessionOrFallback, count_user_sessions_for_limiting, load_session_or_fallback}, +}; + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "lowercase")] +enum Action { + Consent, + Reject, +} + +#[derive(Deserialize, Debug)] +pub(crate) struct ConsentForm { + action: Action, +} + +#[tracing::instrument(name = "handlers.oauth2.device.consent.get", skip_all)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(homeserver): State>, + mut repo: BoxRepository, + mut policy: Policy, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + Path(grant_id): Path, +) -> Result { + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. 
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let user_agent = user_agent.map(|ua| ua.to_string()); + + let Some(session) = maybe_session else { + let login = mas_router::Login::and_continue_device_code_grant(grant_id); + return Ok((cookie_jar, url_builder.redirect(&login)).into_response()); + }; + + activity_tracker + .record_browser_session(&clock, &session) + .await; + + // TODO: better error handling + let grant = repo + .oauth2_device_code_grant() + .lookup(grant_id) + .await? + .context("Device grant not found") + .map_err(InternalError::from_anyhow)?; + + if grant.expires_at < clock.now() { + return Err(InternalError::from_anyhow(anyhow::anyhow!( + "Grant is expired" + ))); + } + + let client = repo + .oauth2_client() + .lookup(grant.client_id) + .await? + .context("Client not found") + .map_err(InternalError::from_anyhow)?; + + let session_counts = count_user_sessions_for_limiting(&mut repo, &session.user).await?; + + // We can close the repository early, we don't need it at this point + repo.save().await?; + + // Evaluate the policy + let res = policy + .evaluate_authorization_grant(mas_policy::AuthorizationGrantInput { + grant_type: mas_policy::GrantType::DeviceCode, + client: &client, + session_counts: Some(session_counts), + scope: &grant.scope, + user: Some(&session.user), + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + if !res.valid() { + warn!(violation = ?res, "Device code grant for client {} denied by policy", client.id); + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = PolicyViolationContext::for_device_code_grant(grant, client) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_policy_violation(&ctx)?; + + return Ok((cookie_jar, 
Html(content)).into_response()); + } + + // Fetch information about the user. This is purely cosmetic, so we let it + // fail and put a 1s timeout to it in case we fail to query it + // XXX: we're likely to need this in other places + let localpart = &session.user.username; + let display_name = match tokio::time::timeout( + Duration::from_secs(1), + homeserver.query_user(localpart), + ) + .await + { + Ok(Ok(user)) => user.displayname, + Ok(Err(err)) => { + tracing::warn!( + error = &*err as &dyn std::error::Error, + localpart, + "Failed to query user" + ); + None + } + Err(_) => { + tracing::warn!(localpart, "Timed out while querying user"); + None + } + }; + + let matrix_user = MatrixUser { + mxid: homeserver.mxid(localpart), + display_name, + }; + + let ctx = DeviceConsentContext::new(grant, client, matrix_user) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let rendered = templates + .render_device_consent(&ctx) + .context("Failed to render template") + .map_err(InternalError::from_anyhow)?; + + Ok((cookie_jar, Html(rendered)).into_response()) +} + +#[tracing::instrument(name = "handlers.oauth2.device.consent.post", skip_all)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(homeserver): State>, + mut repo: BoxRepository, + mut policy: Policy, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + cookie_jar: CookieJar, + Path(grant_id): Path, + Form(form): Form>, +) -> Result { + let form = cookie_jar.verify_form(&clock, form)?; + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + ..
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let user_agent = user_agent.map(|TypedHeader(ua)| ua.to_string()); + + let Some(session) = maybe_session else { + let login = mas_router::Login::and_continue_device_code_grant(grant_id); + return Ok((cookie_jar, url_builder.redirect(&login)).into_response()); + }; + + activity_tracker + .record_browser_session(&clock, &session) + .await; + + // TODO: better error handling + let grant = repo + .oauth2_device_code_grant() + .lookup(grant_id) + .await? + .context("Device grant not found") + .map_err(InternalError::from_anyhow)?; + + if grant.expires_at < clock.now() { + return Err(InternalError::from_anyhow(anyhow::anyhow!( + "Grant is expired" + ))); + } + + let client = repo + .oauth2_client() + .lookup(grant.client_id) + .await? + .context("Client not found") + .map_err(InternalError::from_anyhow)?; + + let session_counts = count_user_sessions_for_limiting(&mut repo, &session.user).await?; + + // Evaluate the policy + let res = policy + .evaluate_authorization_grant(mas_policy::AuthorizationGrantInput { + grant_type: mas_policy::GrantType::DeviceCode, + client: &client, + session_counts: Some(session_counts), + scope: &grant.scope, + user: Some(&session.user), + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + if !res.valid() { + warn!(violation = ?res, "Device code grant for client {} denied by policy", client.id); + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = PolicyViolationContext::for_device_code_grant(grant, client) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_policy_violation(&ctx)?; + + return Ok((cookie_jar, Html(content)).into_response()); + } + + let grant = if grant.is_pending() { + match 
form.action { + Action::Consent => { + repo.oauth2_device_code_grant() + .fulfill(&clock, grant, &session) + .await? + } + Action::Reject => { + repo.oauth2_device_code_grant() + .reject(&clock, grant, &session) + .await? + } + } + } else { + // XXX: In case we're not pending, let's just return the grant as-is + // since it might just be a form resubmission, and feedback is nice enough + warn!( + oauth2_device_code.id = %grant.id, + browser_session.id = %session.id, + user.id = %session.user.id, + "Grant is not pending", + ); + grant + }; + + repo.save().await?; + + // Fetch information about the user. This is purely cosmetic, so we let it + // fail and put a 1s timeout to it in case we fail to query it + // XXX: we're likely to need this in other places + let localpart = &session.user.username; + let display_name = match tokio::time::timeout( + Duration::from_secs(1), + homeserver.query_user(localpart), + ) + .await + { + Ok(Ok(user)) => user.displayname, + Ok(Err(err)) => { + tracing::warn!( + error = &*err as &dyn std::error::Error, + localpart, + "Failed to query user" + ); + None + } + Err(_) => { + tracing::warn!(localpart, "Timed out while querying user"); + None + } + }; + + let matrix_user = MatrixUser { + mxid: homeserver.mxid(localpart), + display_name, + }; + + let ctx = DeviceConsentContext::new(grant, client, matrix_user) + .with_session(session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let rendered = templates + .render_device_consent(&ctx) + .context("Failed to render template") + .map_err(InternalError::from_anyhow)?; + + Ok((cookie_jar, Html(rendered)).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/device/link.rs b/matrix-authentication-service/crates/handlers/src/oauth2/device/link.rs new file mode 100644 index 00000000..84d0c507 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/device/link.rs @@ -0,0 +1,73 @@ +// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + extract::State, + response::{Html, IntoResponse}, +}; +use axum_extra::extract::Query; +use mas_axum_utils::{InternalError, cookies::CookieJar}; +use mas_data_model::BoxClock; +use mas_router::UrlBuilder; +use mas_storage::BoxRepository; +use mas_templates::{ + DeviceLinkContext, DeviceLinkFormField, FieldError, FormState, TemplateContext, Templates, +}; +use serde::{Deserialize, Serialize}; + +use crate::PreferredLanguage; + +#[derive(Serialize, Deserialize)] +pub struct Params { + #[serde(default)] + code: Option, +} + +#[tracing::instrument(name = "handlers.oauth2.device.link.get", skip_all)] +pub(crate) async fn get( + clock: BoxClock, + mut repo: BoxRepository, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + cookie_jar: CookieJar, + Query(query): Query, +) -> Result { + let mut form_state = FormState::from_form(&query); + + // If we have a code in query, find it in the database + if let Some(code) = &query.code { + // Find the code in the database + let code = code.to_uppercase(); + let grant = repo + .oauth2_device_code_grant() + .find_by_user_code(&code) + .await? 
+ // XXX: We should have different error messages for already exchanged and expired + .filter(|grant| grant.is_pending()) + .filter(|grant| grant.expires_at > clock.now()); + + if let Some(grant) = grant { + // This is a valid code, redirect to the consent page + // This will in turn redirect to the login page if the user is not logged in + let destination = url_builder.redirect(&mas_router::DeviceCodeConsent::new(grant.id)); + + return Ok((cookie_jar, destination).into_response()); + } + + // The code isn't valid, set an error on the form + form_state = form_state.with_error_on_field(DeviceLinkFormField::Code, FieldError::Invalid); + } + + // Render the form + let ctx = DeviceLinkContext::new() + .with_form_state(form_state) + .with_language(locale); + + let content = templates.render_device_link(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/device/mod.rs b/matrix-authentication-service/crates/handlers/src/oauth2/device/mod.rs new file mode 100644 index 00000000..565ce5df --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/device/mod.rs @@ -0,0 +1,9 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub mod authorize; +pub mod consent; +pub mod link; diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/discovery.rs b/matrix-authentication-service/crates/handlers/src/oauth2/discovery.rs new file mode 100644 index 00000000..b1d000f8 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/discovery.rs @@ -0,0 +1,221 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C.
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{Json, extract::State, response::IntoResponse}; +use mas_iana::oauth::{ + OAuthAuthorizationEndpointResponseType, OAuthClientAuthenticationMethod, + PkceCodeChallengeMethod, +}; +use mas_jose::jwa::SUPPORTED_SIGNING_ALGORITHMS; +use mas_keystore::Keystore; +use mas_router::UrlBuilder; +use oauth2_types::{ + oidc::{ClaimType, ProviderMetadata, SubjectType}, + requests::{Display, GrantType, Prompt, ResponseMode}, + scope, +}; +use serde::Serialize; + +use crate::SiteConfig; + +#[derive(Debug, Serialize)] +struct DiscoveryResponse { + #[serde(flatten)] + standard: ProviderMetadata, + + #[serde(rename = "org.matrix.matrix-authentication-service.graphql_endpoint")] + graphql_endpoint: url::Url, + + // As per MSC4191 + account_management_uri: url::Url, + account_management_actions_supported: Vec, +} + +#[tracing::instrument(name = "handlers.oauth2.discovery.get", skip_all)] +pub(crate) async fn get( + State(key_store): State, + State(url_builder): State, + State(site_config): State, +) -> impl IntoResponse { + // This is how clients can authenticate + let client_auth_methods_supported = Some(vec![ + OAuthClientAuthenticationMethod::ClientSecretBasic, + OAuthClientAuthenticationMethod::ClientSecretPost, + OAuthClientAuthenticationMethod::ClientSecretJwt, + OAuthClientAuthenticationMethod::PrivateKeyJwt, + OAuthClientAuthenticationMethod::None, + ]); + + // Those are the algorithms supported by `mas-jose` + let client_auth_signing_alg_values_supported = Some(SUPPORTED_SIGNING_ALGORITHMS.to_vec()); + + // This is how we can sign stuff + let jwt_signing_alg_values_supported = Some(key_store.available_signing_algorithms()); + + // Prepare all the endpoints + let issuer = Some(url_builder.oidc_issuer().into()); + let authorization_endpoint = Some(url_builder.oauth_authorization_endpoint()); + let token_endpoint = 
Some(url_builder.oauth_token_endpoint()); + let device_authorization_endpoint = Some(url_builder.oauth_device_authorization_endpoint()); + let jwks_uri = Some(url_builder.jwks_uri()); + let introspection_endpoint = Some(url_builder.oauth_introspection_endpoint()); + let revocation_endpoint = Some(url_builder.oauth_revocation_endpoint()); + let userinfo_endpoint = Some(url_builder.oidc_userinfo_endpoint()); + let registration_endpoint = Some(url_builder.oauth_registration_endpoint()); + + let scopes_supported = Some(vec![scope::OPENID.to_string(), scope::EMAIL.to_string()]); + + let response_types_supported = Some(vec![ + OAuthAuthorizationEndpointResponseType::Code.into(), + OAuthAuthorizationEndpointResponseType::IdToken.into(), + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(), + ]); + + let response_modes_supported = Some(vec![ + ResponseMode::FormPost, + ResponseMode::Query, + ResponseMode::Fragment, + ]); + + let grant_types_supported = Some(vec![ + GrantType::AuthorizationCode, + GrantType::RefreshToken, + GrantType::ClientCredentials, + GrantType::DeviceCode, + ]); + + let token_endpoint_auth_methods_supported = client_auth_methods_supported.clone(); + let token_endpoint_auth_signing_alg_values_supported = + client_auth_signing_alg_values_supported.clone(); + + let revocation_endpoint_auth_methods_supported = client_auth_methods_supported.clone(); + let revocation_endpoint_auth_signing_alg_values_supported = + client_auth_signing_alg_values_supported.clone(); + + let introspection_endpoint_auth_methods_supported = + client_auth_methods_supported.map(|v| v.into_iter().map(Into::into).collect()); + let introspection_endpoint_auth_signing_alg_values_supported = + client_auth_signing_alg_values_supported; + + let code_challenge_methods_supported = Some(vec![ + PkceCodeChallengeMethod::Plain, + PkceCodeChallengeMethod::S256, + ]); + + let subject_types_supported = Some(vec![SubjectType::Public]); + + let id_token_signing_alg_values_supported = 
jwt_signing_alg_values_supported.clone(); + let userinfo_signing_alg_values_supported = jwt_signing_alg_values_supported; + + let display_values_supported = Some(vec![Display::Page]); + + let claim_types_supported = Some(vec![ClaimType::Normal]); + + let claims_supported = Some(vec![ + "iss".to_owned(), + "sub".to_owned(), + "aud".to_owned(), + "iat".to_owned(), + "exp".to_owned(), + "nonce".to_owned(), + "auth_time".to_owned(), + "at_hash".to_owned(), + "c_hash".to_owned(), + ]); + + let claims_parameter_supported = Some(false); + let request_parameter_supported = Some(false); + let request_uri_parameter_supported = Some(false); + + let prompt_values_supported = Some({ + let mut v = vec![Prompt::Login]; + // Advertise for prompt=create if password registration is enabled + // TODO: we may want to be able to forward that to upstream providers if they + // support it + if site_config.password_registration_enabled { + v.push(Prompt::Create); + } + v + }); + + let standard = ProviderMetadata { + issuer, + authorization_endpoint, + token_endpoint, + jwks_uri, + registration_endpoint, + scopes_supported, + response_types_supported, + response_modes_supported, + grant_types_supported, + token_endpoint_auth_methods_supported, + token_endpoint_auth_signing_alg_values_supported, + revocation_endpoint, + revocation_endpoint_auth_methods_supported, + revocation_endpoint_auth_signing_alg_values_supported, + introspection_endpoint, + introspection_endpoint_auth_methods_supported, + introspection_endpoint_auth_signing_alg_values_supported, + code_challenge_methods_supported, + userinfo_endpoint, + subject_types_supported, + id_token_signing_alg_values_supported, + userinfo_signing_alg_values_supported, + display_values_supported, + claim_types_supported, + claims_supported, + claims_parameter_supported, + request_parameter_supported, + request_uri_parameter_supported, + prompt_values_supported, + device_authorization_endpoint, + ..ProviderMetadata::default() + }; + + 
Json(DiscoveryResponse { + standard, + graphql_endpoint: url_builder.graphql_endpoint(), + account_management_uri: url_builder.account_management_uri(), + // This needs to be kept in sync with what is supported in the frontend, + // see frontend/src/routes/__root.tsx + account_management_actions_supported: vec![ + "org.matrix.profile".to_owned(), + "org.matrix.devices_list".to_owned(), + "org.matrix.device_view".to_owned(), + "org.matrix.device_delete".to_owned(), + "org.matrix.cross_signing_reset".to_owned(), + // These are unstable versions from MSC4191 and we will remove them once the above + // stable values have enough adoption by clients + "org.matrix.sessions_list".to_owned(), + "org.matrix.session_view".to_owned(), + "org.matrix.session_end".to_owned(), + ], + }) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use oauth2_types::oidc::ProviderMetadata; + use sqlx::PgPool; + + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_valid_discovery_metadata(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let request = Request::get("/.well-known/openid-configuration").empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let metadata: ProviderMetadata = response.json(); + metadata + .validate(state.url_builder.oidc_issuer().as_str()) + .expect("Invalid metadata"); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/introspection.rs b/matrix-authentication-service/crates/handlers/src/oauth2/introspection.rs new file mode 100644 index 00000000..17f50892 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/introspection.rs @@ -0,0 +1,1302 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + collections::BTreeSet, + sync::{Arc, LazyLock}, +}; + +use axum::{Json, extract::State, http::HeaderValue, response::IntoResponse}; +use hyper::{HeaderMap, StatusCode}; +use mas_axum_utils::{ + client_authorization::{ClientAuthorization, CredentialsVerificationError}, + record_error, +}; +use mas_data_model::{ + BoxClock, Clock, Device, TokenFormatError, TokenType, personal::session::PersonalSessionOwner, +}; +use mas_iana::oauth::{OAuthClientAuthenticationMethod, OAuthTokenTypeHint}; +use mas_keystore::Encrypter; +use mas_matrix::HomeserverConnection; +use mas_storage::{ + BoxRepository, + compat::{CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository}, + oauth2::{OAuth2AccessTokenRepository, OAuth2RefreshTokenRepository, OAuth2SessionRepository}, + user::UserRepository, +}; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + requests::{IntrospectionRequest, IntrospectionResponse}, + scope::{Scope, ScopeToken}, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use thiserror::Error; +use ulid::Ulid; + +use crate::{ActivityTracker, METER, impl_from_error_for_route}; + +static INTROSPECTION_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.oauth2.introspection_request") + .with_description("Number of OAuth 2.0 introspection requests") + .with_unit("{request}") + .build() +}); + +const KIND: Key = Key::from_static_str("kind"); +const ACTIVE: Key = Key::from_static_str("active"); + +#[derive(Debug, Error)] +pub enum RouteError { + /// An internal error occurred. + #[error(transparent)] + Internal(Box), + + /// The client could not be found. + #[error("could not find client")] + ClientNotFound, + + /// The client is not allowed to introspect. 
+ #[error("client {0} is not allowed to introspect")] + NotAllowed(Ulid), + + /// The token type is not the one expected. + #[error("unexpected token type")] + UnexpectedTokenType, + + /// The overall token format is invalid. + #[error("invalid token format")] + InvalidTokenFormat(#[from] TokenFormatError), + + /// The token could not be found in the database. + #[error("unknown {0}")] + UnknownToken(TokenType), + + /// The token is not valid. + #[error("{0} is not valid")] + InvalidToken(TokenType), + + /// The OAuth session is not valid. + #[error("invalid oauth session {0}")] + InvalidOAuthSession(Ulid), + + /// The OAuth session could not be found in the database. + #[error("unknown oauth session {0}")] + CantLoadOAuthSession(Ulid), + + /// The compat session is not valid. + #[error("invalid compat session {0}")] + InvalidCompatSession(Ulid), + + /// The compat session could not be found in the database. + #[error("unknown compat session {0}")] + CantLoadCompatSession(Ulid), + + /// The personal access token session is not valid. + #[error("invalid personal access token session {0}")] + InvalidPersonalSession(Ulid), + + /// The personal access token session could not be found in the database. 
+ #[error("unknown personal access token session {0}")] + CantLoadPersonalSession(Ulid), + + /// The Device ID in the compat session can't be encoded as a scope + #[error("device ID contains characters that are not allowed in a scope")] + CantEncodeDeviceID(#[from] mas_data_model::ToScopeTokenError), + + #[error("invalid user {0}")] + InvalidUser(Ulid), + + #[error("unknown user {0}")] + CantLoadUser(Ulid), + + #[error("unknown OAuth2 client {0}")] + CantLoadOAuth2Client(Ulid), + + #[error("bad request")] + BadRequest, + + #[error("failed to verify token")] + FailedToVerifyToken(#[source] anyhow::Error), + + #[error(transparent)] + ClientCredentialsVerification(#[from] CredentialsVerificationError), + + #[error("bearer token presented is invalid")] + InvalidBearerToken, +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!( + self, + Self::Internal(_) + | Self::CantLoadCompatSession(_) + | Self::CantLoadOAuthSession(_) + | Self::CantLoadUser(_) + | Self::FailedToVerifyToken(_) + ); + + let response = match self { + e @ (Self::Internal(_) + | Self::CantLoadCompatSession(_) + | Self::CantLoadOAuthSession(_) + | Self::CantLoadPersonalSession(_) + | Self::CantLoadUser(_) + | Self::CantLoadOAuth2Client(_) + | Self::FailedToVerifyToken(_)) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json( + ClientError::from(ClientErrorCode::ServerError).with_description(e.to_string()), + ), + ) + .into_response(), + Self::ClientNotFound => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::InvalidClient)), + ) + .into_response(), + Self::ClientCredentialsVerification(e) => ( + StatusCode::UNAUTHORIZED, + Json( + ClientError::from(ClientErrorCode::InvalidClient) + .with_description(e.to_string()), + ), + ) + .into_response(), + e @ Self::InvalidBearerToken => ( + StatusCode::UNAUTHORIZED, + Json( + ClientError::from(ClientErrorCode::AccessDenied) + .with_description(e.to_string()), + ), + 
) + .into_response(), + + Self::UnknownToken(_) + | Self::UnexpectedTokenType + | Self::InvalidToken(_) + | Self::InvalidUser(_) + | Self::InvalidCompatSession(_) + | Self::InvalidOAuthSession(_) + | Self::InvalidPersonalSession(_) + | Self::InvalidTokenFormat(_) + | Self::CantEncodeDeviceID(_) => { + INTROSPECTION_COUNTER.add(1, &[KeyValue::new(ACTIVE.clone(), false)]); + + Json(INACTIVE).into_response() + } + + Self::NotAllowed(_) => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::AccessDenied)), + ) + .into_response(), + + Self::BadRequest => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidRequest)), + ) + .into_response(), + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +const INACTIVE: IntrospectionResponse = IntrospectionResponse { + active: false, + scope: None, + client_id: None, + username: None, + token_type: None, + exp: None, + expires_in: None, + iat: None, + nbf: None, + sub: None, + aud: None, + iss: None, + jti: None, + device_id: None, +}; + +const UNSTABLE_API_SCOPE: ScopeToken = + ScopeToken::from_static("urn:matrix:org.matrix.msc2967.client:api:*"); +const STABLE_API_SCOPE: ScopeToken = ScopeToken::from_static("urn:matrix:client:api:*"); +const SYNAPSE_ADMIN_SCOPE: ScopeToken = ScopeToken::from_static("urn:synapse:admin:*"); + +/// Normalize a scope by adding the stable and unstable API scopes equivalents +/// if missing +fn normalize_scope(mut scope: Scope) -> Scope { + // Here we abuse the fact that the scope is a BTreeSet to not care about + // duplicates + let mut to_add = BTreeSet::new(); + for token in &*scope { + if token == &STABLE_API_SCOPE { + to_add.insert(UNSTABLE_API_SCOPE); + } else if token == &UNSTABLE_API_SCOPE { + to_add.insert(STABLE_API_SCOPE); + } else if let Some(device) = Device::from_scope_token(token) { + let tokens = device + .to_scope_token() + .expect("from/to scope token rountrip should 
never fail"); + to_add.extend(tokens); + } + } + scope.append(&mut to_add); + scope +} + +#[tracing::instrument( + name = "handlers.oauth2.introspection.post", + fields(client.id = credentials.client_id()), + skip_all, +)] +pub(crate) async fn post( + clock: BoxClock, + State(http_client): State, + mut repo: BoxRepository, + activity_tracker: ActivityTracker, + State(encrypter): State, + State(homeserver): State>, + headers: HeaderMap, + ClientAuthorization { credentials, form }: ClientAuthorization, +) -> Result { + if let Some(token) = credentials.bearer_token() { + // If the client presented a bearer token, we check with the homeserver + // configuration if it is allowed to use the introspection endpoint + if !homeserver + .verify_token(token) + .await + .map_err(RouteError::FailedToVerifyToken)? + { + return Err(RouteError::InvalidBearerToken); + } + } else { + // Otherwise, it presented regular client credentials, so we verify them + let client = credentials + .fetch(&mut repo) + .await? + .ok_or(RouteError::ClientNotFound)?; + + // Only confidential clients are allowed to introspect + let method = match &client.token_endpoint_auth_method { + None | Some(OAuthClientAuthenticationMethod::None) => { + return Err(RouteError::NotAllowed(client.id)); + } + Some(c) => c, + }; + + credentials + .verify(&http_client, &encrypter, method, &client) + .await?; + } + + let Some(form) = form else { + return Err(RouteError::BadRequest); + }; + + let token = &form.token; + let token_type = TokenType::check(token)?; + if let Some(hint) = form.token_type_hint + && token_type != hint + { + return Err(RouteError::UnexpectedTokenType); + } + + // Not all device IDs can be encoded as scope. On OAuth 2.0 sessions, we + // don't have this problem, as the device ID *is* already encoded as a scope. + // But on compatibility sessions, it's possible to have device IDs with + // spaces in them, or other weird characters. 
+ // In those cases, we prefer explicitly giving out the device ID as a separate + // field. The client introspecting tells us whether it supports having the + // device ID as a separate field through this header. + let supports_explicit_device_id = + headers.get("X-MAS-Supports-Device-Id") == Some(&HeaderValue::from_static("1")); + + // XXX: we should get the IP from the client introspecting the token + let ip = None; + + let reply = match token_type { + TokenType::AccessToken => { + let mut access_token = repo + .oauth2_access_token() + .find_by_token(token) + .await? + .ok_or(RouteError::UnknownToken(TokenType::AccessToken))?; + + if !access_token.is_valid(clock.now()) { + return Err(RouteError::InvalidToken(TokenType::AccessToken)); + } + + let session = repo + .oauth2_session() + .lookup(access_token.session_id) + .await? + .ok_or(RouteError::CantLoadOAuthSession(access_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidOAuthSession(session.id)); + } + + // If this is the first time we're using this token, mark it as used + if !access_token.is_used() { + access_token = repo + .oauth2_access_token() + .mark_used(&clock, access_token) + .await?; + } + + // The session might not have a user on it (for Client Credentials grants for + // example), so we're optionally fetching the user + let (sub, username) = if let Some(user_id) = session.user_id { + let user = repo + .user() + .lookup(user_id) + .await? 
+ .ok_or(RouteError::CantLoadUser(user_id))?; + + if !user.is_valid() { + return Err(RouteError::InvalidUser(user.id)); + } + + (Some(user.sub), Some(user.username)) + } else { + (None, None) + }; + + activity_tracker + .record_oauth2_session(&clock, &session, ip) + .await; + + INTROSPECTION_COUNTER.add( + 1, + &[ + KeyValue::new(KIND, "oauth2_access_token"), + KeyValue::new(ACTIVE, true), + ], + ); + + let scope = normalize_scope(session.scope); + + IntrospectionResponse { + active: true, + scope: Some(scope), + client_id: Some(session.client_id.to_string()), + username, + token_type: Some(OAuthTokenTypeHint::AccessToken), + exp: access_token.expires_at, + expires_in: access_token + .expires_at + .map(|expires_at| expires_at.signed_duration_since(clock.now())), + iat: Some(access_token.created_at), + nbf: Some(access_token.created_at), + sub, + aud: None, + iss: None, + jti: Some(access_token.jti()), + device_id: None, + } + } + + TokenType::RefreshToken => { + let refresh_token = repo + .oauth2_refresh_token() + .find_by_token(token) + .await? + .ok_or(RouteError::UnknownToken(TokenType::RefreshToken))?; + + if !refresh_token.is_valid() { + return Err(RouteError::InvalidToken(TokenType::RefreshToken)); + } + + let session = repo + .oauth2_session() + .lookup(refresh_token.session_id) + .await? + .ok_or(RouteError::CantLoadOAuthSession(refresh_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidOAuthSession(session.id)); + } + + // The session might not have a user on it (for Client Credentials grants for + // example), so we're optionally fetching the user + let (sub, username) = if let Some(user_id) = session.user_id { + let user = repo + .user() + .lookup(user_id) + .await? 
+ .ok_or(RouteError::CantLoadUser(user_id))?; + + if !user.is_valid() { + return Err(RouteError::InvalidUser(user.id)); + } + + (Some(user.sub), Some(user.username)) + } else { + (None, None) + }; + + activity_tracker + .record_oauth2_session(&clock, &session, ip) + .await; + + INTROSPECTION_COUNTER.add( + 1, + &[ + KeyValue::new(KIND, "oauth2_refresh_token"), + KeyValue::new(ACTIVE, true), + ], + ); + + let scope = normalize_scope(session.scope); + + IntrospectionResponse { + active: true, + scope: Some(scope), + client_id: Some(session.client_id.to_string()), + username, + token_type: Some(OAuthTokenTypeHint::RefreshToken), + exp: None, + expires_in: None, + iat: Some(refresh_token.created_at), + nbf: Some(refresh_token.created_at), + sub, + aud: None, + iss: None, + jti: Some(refresh_token.jti()), + device_id: None, + } + } + + TokenType::CompatAccessToken => { + let access_token = repo + .compat_access_token() + .find_by_token(token) + .await? + .ok_or(RouteError::UnknownToken(TokenType::CompatAccessToken))?; + + if !access_token.is_valid(clock.now()) { + return Err(RouteError::InvalidToken(TokenType::CompatAccessToken)); + } + + let session = repo + .compat_session() + .lookup(access_token.session_id) + .await? + .ok_or(RouteError::CantLoadCompatSession(access_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidCompatSession(session.id)); + } + + let user = repo + .user() + .lookup(session.user_id) + .await? + .ok_or(RouteError::CantLoadUser(session.user_id))?; + + if !user.is_valid() { + return Err(RouteError::InvalidUser(user.id))?; + } + + // Grant the synapse admin scope if the session has the admin flag set. 
+ let synapse_admin_scope_opt = session.is_synapse_admin.then_some(SYNAPSE_ADMIN_SCOPE); + + // If the client supports explicitly giving the device ID in the response, skip + // encoding it in the scope + let device_scope_opt = if supports_explicit_device_id { + None + } else { + session + .device + .as_ref() + .map(Device::to_scope_token) + .transpose()? + }; + + let scope = [STABLE_API_SCOPE, UNSTABLE_API_SCOPE] + .into_iter() + .chain(device_scope_opt.into_iter().flatten()) + .chain(synapse_admin_scope_opt) + .collect(); + + activity_tracker + .record_compat_session(&clock, &session, ip) + .await; + + INTROSPECTION_COUNTER.add( + 1, + &[ + KeyValue::new(KIND, "compat_access_token"), + KeyValue::new(ACTIVE, true), + ], + ); + + IntrospectionResponse { + active: true, + scope: Some(scope), + client_id: Some("legacy".into()), + username: Some(user.username), + token_type: Some(OAuthTokenTypeHint::AccessToken), + exp: access_token.expires_at, + expires_in: access_token + .expires_at + .map(|expires_at| expires_at.signed_duration_since(clock.now())), + iat: Some(access_token.created_at), + nbf: Some(access_token.created_at), + sub: Some(user.sub), + aud: None, + iss: None, + jti: None, + device_id: session.device.map(Device::into), + } + } + + TokenType::CompatRefreshToken => { + let refresh_token = repo + .compat_refresh_token() + .find_by_token(token) + .await? + .ok_or(RouteError::UnknownToken(TokenType::CompatRefreshToken))?; + + if !refresh_token.is_valid() { + return Err(RouteError::InvalidToken(TokenType::CompatRefreshToken)); + } + + let session = repo + .compat_session() + .lookup(refresh_token.session_id) + .await? + .ok_or(RouteError::CantLoadCompatSession(refresh_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidCompatSession(session.id)); + } + + let user = repo + .user() + .lookup(session.user_id) + .await? 
+ .ok_or(RouteError::CantLoadUser(session.user_id))?; + + if !user.is_valid() { + return Err(RouteError::InvalidUser(user.id))?; + } + + // Grant the synapse admin scope if the session has the admin flag set. + let synapse_admin_scope_opt = session.is_synapse_admin.then_some(SYNAPSE_ADMIN_SCOPE); + + // If the client supports explicitly giving the device ID in the response, skip + // encoding it in the scope + let device_scope_opt = if supports_explicit_device_id { + None + } else { + session + .device + .as_ref() + .map(Device::to_scope_token) + .transpose()? + }; + + let scope = [STABLE_API_SCOPE, UNSTABLE_API_SCOPE] + .into_iter() + .chain(device_scope_opt.into_iter().flatten()) + .chain(synapse_admin_scope_opt) + .collect(); + + activity_tracker + .record_compat_session(&clock, &session, ip) + .await; + + INTROSPECTION_COUNTER.add( + 1, + &[ + KeyValue::new(KIND, "compat_refresh_token"), + KeyValue::new(ACTIVE, true), + ], + ); + + IntrospectionResponse { + active: true, + scope: Some(scope), + client_id: Some("legacy".into()), + username: Some(user.username), + token_type: Some(OAuthTokenTypeHint::RefreshToken), + exp: None, + expires_in: None, + iat: Some(refresh_token.created_at), + nbf: Some(refresh_token.created_at), + sub: Some(user.sub), + aud: None, + iss: None, + jti: None, + device_id: session.device.map(Device::into), + } + } + + TokenType::PersonalAccessToken => { + let access_token = repo + .personal_access_token() + .find_by_token(token) + .await? + .ok_or(RouteError::UnknownToken(TokenType::AccessToken))?; + + if !access_token.is_valid(clock.now()) { + return Err(RouteError::InvalidToken(TokenType::AccessToken)); + } + + let session = repo + .personal_session() + .lookup(access_token.session_id) + .await? 
+ .ok_or(RouteError::CantLoadPersonalSession(access_token.session_id))?; + + if !session.is_valid() { + return Err(RouteError::InvalidPersonalSession(session.id)); + } + + let actor_user = repo + .user() + .lookup(session.actor_user_id) + .await? + .ok_or(RouteError::CantLoadUser(session.actor_user_id))?; + + if !actor_user.is_valid() { + return Err(RouteError::InvalidUser(actor_user.id)); + } + + let client_id = match session.owner { + PersonalSessionOwner::User(owner_user_id) => { + let owner_user = repo + .user() + .lookup(owner_user_id) + .await? + .ok_or(RouteError::CantLoadUser(owner_user_id))?; + + if !owner_user.is_valid() { + return Err(RouteError::InvalidUser(owner_user.id)); + } + + None + } + PersonalSessionOwner::OAuth2Client(owner_client_id) => { + let owner_client = repo + .oauth2_client() + .lookup(owner_client_id) + .await? + .ok_or(RouteError::CantLoadOAuth2Client(owner_client_id))?; + + // OAuth2 clients are always valid if they're in the database + Some(owner_client.client_id.clone()) + } + }; + + activity_tracker + .record_personal_session(&clock, &session, ip) + .await; + + INTROSPECTION_COUNTER.add( + 1, + &[ + KeyValue::new(KIND, "personal_access_token"), + KeyValue::new(ACTIVE, true), + ], + ); + + let scope = normalize_scope(session.scope); + + IntrospectionResponse { + active: true, + scope: Some(scope), + client_id, + username: Some(actor_user.username), + token_type: Some(OAuthTokenTypeHint::AccessToken), + exp: access_token.expires_at, + expires_in: access_token + .expires_at + .map(|expires_at| expires_at.signed_duration_since(clock.now())), + iat: Some(access_token.created_at), + nbf: Some(access_token.created_at), + sub: Some(actor_user.sub), + aud: None, + iss: None, + jti: None, + device_id: None, + } + } + }; + + repo.save().await?; + + Ok(Json(reply)) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::{Request, StatusCode}; + use mas_data_model::{ + AccessToken, Clock, RefreshToken, TokenType, 
personal::session::PersonalSessionOwner, + }; + use mas_iana::oauth::OAuthTokenTypeHint; + use mas_matrix::{HomeserverConnection, MockHomeserverConnection, ProvisionRequest}; + use mas_router::{OAuth2Introspection, OAuth2RegistrationEndpoint, SimpleRoute}; + use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + registration::ClientRegistrationResponse, + requests::IntrospectionResponse, + scope::{OPENID, Scope}, + }; + use serde_json::json; + use sqlx::PgPool; + use zeroize::Zeroizing; + + use crate::{ + oauth2::generate_token_pair, + test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}, + }; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_introspect_oauth_tokens(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client which will be used to do introspection requests + let request = Request::post(OAuth2RegistrationEndpoint::PATH).json(json!({ + "client_uri": "https://introspecting.com/", + "grant_types": [], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let client: ClientRegistrationResponse = response.json(); + let introspecting_client_id = client.client_id; + let introspecting_client_secret = client.client_secret.unwrap(); + + // Provision a client which will be used to generate tokens + let request = Request::post(OAuth2RegistrationEndpoint::PATH).json(json!({ + "client_uri": "https://client.com/", + "redirect_uris": ["https://client.com/"], + "response_types": ["code"], + "grant_types": ["authorization_code", "refresh_token"], + "token_endpoint_auth_method": "none", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let ClientRegistrationResponse { client_id, .. 
} = response.json(); + + let mut repo = state.repository().await.unwrap(); + // Provision a user and an oauth session + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + let client = repo + .oauth2_client() + .find_by_client_id(&client_id) + .await + .unwrap() + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + let (AccessToken { access_token, .. }, RefreshToken { refresh_token, .. }) = + generate_token_pair( + &mut state.rng(), + &state.clock, + &mut repo, + &session, + Duration::microseconds(5 * 60 * 1000 * 1000), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now that we have a token, we can introspect it + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": access_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + assert_eq!(response.username, Some("alice".to_owned())); + assert_eq!(response.client_id, Some(client_id.clone())); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::AccessToken)); + assert_eq!(response.scope, Some(Scope::from_iter([OPENID]))); + + // Do the same request, but with a token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": access_token, "token_type_hint": "access_token"})); + let response = 
state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + + // Do the same request, but with the wrong token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": access_token, "token_type_hint": "refresh_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active + + // Do the same, but with a refresh token + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": refresh_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + assert_eq!(response.username, Some("alice".to_owned())); + assert_eq!(response.client_id, Some(client_id.clone())); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::RefreshToken)); + assert_eq!(response.scope, Some(Scope::from_iter([OPENID]))); + + // Do the same request, but with a token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": refresh_token, "token_type_hint": "refresh_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + + // Do the same request, but with the wrong token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": refresh_token, "token_type_hint": "access_token"})); + let response = 
state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active + + // We should have recorded the session last activity + state.activity_tracker.flush().await; + let mut repo = state.repository().await.unwrap(); + let session = repo + .oauth2_session() + .lookup(session.id) + .await + .unwrap() + .unwrap(); + assert_eq!(session.last_active_at, Some(state.clock.now())); + + // And recorded the access token as used + let access_token_lookup = repo + .oauth2_access_token() + .find_by_token(&access_token) + .await + .unwrap() + .unwrap(); + assert!(access_token_lookup.is_used()); + assert_eq!(access_token_lookup.first_used_at, Some(state.clock.now())); + repo.cancel().await.unwrap(); + + // Advance the clock to invalidate the access token + let old_now = state.clock.now(); + state.clock.advance(Duration::try_hours(1).unwrap()); + + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": access_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active anymore + + // That should not have updated the session last activity + state.activity_tracker.flush().await; + let mut repo = state.repository().await.unwrap(); + let session = repo + .oauth2_session() + .lookup(session.id) + .await + .unwrap() + .unwrap(); + assert_eq!(session.last_active_at, Some(old_now)); + repo.cancel().await.unwrap(); + + // But the refresh token should still be valid + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": refresh_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let 
response: IntrospectionResponse = response.json(); + assert!(response.active); + + // But this time, we should have updated the session last activity + state.activity_tracker.flush().await; + let mut repo = state.repository().await.unwrap(); + let session = repo + .oauth2_session() + .lookup(session.id) + .await + .unwrap() + .unwrap(); + assert_eq!(session.last_active_at, Some(state.clock.now())); + repo.cancel().await.unwrap(); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_introspect_compat_tokens(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client which will be used to do introspection requests + let request = Request::post(OAuth2RegistrationEndpoint::PATH).json(json!({ + "client_uri": "https://introspecting.com/", + "grant_types": [], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let client: ClientRegistrationResponse = response.json(); + let introspecting_client_id = client.client_id; + let introspecting_client_secret = client.client_secret.unwrap(); + + // Provision a user with a password, so that we can use the password flow + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + state + .homeserver_connection + .provision_user(&ProvisionRequest::new(&user.username, &user.sub)) + .await + .unwrap(); + + let (version, hashed_password) = state + .password_manager + .hash(&mut state.rng(), Zeroizing::new("password".to_owned())) + .await + .unwrap(); + + repo.user_password() + .add( + &mut state.rng(), + &state.clock, + &user, + version, + hashed_password, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now do a password flow to get an access token and a refresh token + let request = 
Request::post("/_matrix/client/v3/login").json(json!({ + "type": "m.login.password", + "refresh_token": true, + "identifier": { + "type": "m.id.user", + "user": "alice", + }, + "password": "password", + })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: serde_json::Value = response.json(); + let access_token = response["access_token"].as_str().unwrap(); + let refresh_token = response["refresh_token"].as_str().unwrap(); + let device_id = response["device_id"].as_str().unwrap(); + let expected_scope: Scope = + format!("urn:matrix:org.matrix.msc2967.client:api:* urn:matrix:org.matrix.msc2967.client:device:{device_id} urn:matrix:client:api:* urn:matrix:client:device:{device_id}") + .parse() + .unwrap(); + + // Now that we have a token, we can introspect it + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": access_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + assert_eq!(response.username.as_deref(), Some("alice")); + assert_eq!(response.client_id.as_deref(), Some("legacy")); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::AccessToken)); + assert_eq!(response.scope.as_ref(), Some(&expected_scope)); + assert_eq!(response.device_id.as_deref(), Some(device_id)); + + // Check that requesting with X-MAS-Supports-Device-Id removes the device ID + // from the scope but not from the explicit device_id field + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .header("X-MAS-Supports-Device-Id", "1") + .form(json!({ "token": access_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + 
assert!(response.active); + assert_eq!(response.username.as_deref(), Some("alice")); + assert_eq!(response.client_id.as_deref(), Some("legacy")); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::AccessToken)); + assert_eq!( + response.scope.map(|s| s.to_string()), + Some("urn:matrix:client:api:* urn:matrix:org.matrix.msc2967.client:api:*".to_owned()) + ); + assert_eq!(response.device_id.as_deref(), Some(device_id)); + + // Do the same request, but with a token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": access_token, "token_type_hint": "access_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + + // Do the same request, but with the wrong token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": access_token, "token_type_hint": "refresh_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active + + // Do the same, but with a refresh token + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": refresh_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + assert_eq!(response.username.as_deref(), Some("alice")); + assert_eq!(response.client_id.as_deref(), Some("legacy")); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::RefreshToken)); + assert_eq!(response.scope.as_ref(), Some(&expected_scope)); + 
assert_eq!(response.device_id.as_deref(), Some(device_id)); + + // Do the same request, but with a token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": refresh_token, "token_type_hint": "refresh_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + + // Do the same request, but with the wrong token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": refresh_token, "token_type_hint": "access_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active + + // Advance the clock to invalidate the access token + state.clock.advance(Duration::try_hours(1).unwrap()); + + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": access_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active anymore + + // But the refresh token should still be valid + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": refresh_token })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_introspect_with_bearer_token(pool: PgPool) { + setup(); + let state = 
TestState::from_pool(pool).await.unwrap(); + + // Check that talking to the introspection endpoint with the bearer token from + // the MockHomeserverConnection doesn't error out + let request = Request::post(OAuth2Introspection::PATH) + .bearer(MockHomeserverConnection::VALID_BEARER_TOKEN) + .form(json!({ "token": "some_token" })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); + + // Check with another token, we should get a 401 + let request = Request::post(OAuth2Introspection::PATH) + .bearer("another_token") + .form(json!({ "token": "some_token" })); + let response = state.request(request).await; + response.assert_status(StatusCode::UNAUTHORIZED); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::AccessDenied); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_introspect_personal_access_tokens(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client which will be used to do introspection requests + let request = Request::post(OAuth2RegistrationEndpoint::PATH).json(json!({ + "client_uri": "https://introspecting.com/", + "grant_types": [], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let client: ClientRegistrationResponse = response.json(); + let introspecting_client_id = client.client_id; + let introspecting_client_secret = client.client_secret.unwrap(); + + let mut repo = state.repository().await.unwrap(); + + // Provision an owner user (who provisions the personal session) + let owner_user = repo + .user() + .add(&mut state.rng(), &state.clock, "admin".to_owned()) + .await + .unwrap(); + + // Provision an actor user (which the token represents) + let actor_user = repo + .user() + .add(&mut 
state.rng(), &state.clock, "bruce".to_owned()) + .await + .unwrap(); + + // admin creates a personal session to control bruce's account + let personal_session = repo + .personal_session() + .add( + &mut state.rng(), + &state.clock, + PersonalSessionOwner::User(owner_user.id), + &actor_user, + "Test Personal Access Token".to_owned(), + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + // Generate a personal access token with proper token format + let token_string = TokenType::PersonalAccessToken.generate(&mut state.rng()); + let _personal_access_token = repo + .personal_access_token() + .add( + &mut state.rng(), + &state.clock, + &personal_session, + &token_string, + Some(Duration::try_hours(1).unwrap()), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now that we have a personal access token, we can introspect it + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": token_string })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + // Actor user + assert_eq!(response.username, Some("bruce".to_owned())); + // Not owned by a client + assert_eq!(response.client_id, None); + assert_eq!(response.token_type, Some(OAuthTokenTypeHint::AccessToken)); + assert_eq!(response.scope, Some(Scope::from_iter([OPENID]))); + + // Do the same request, but with a token_type_hint + let last_active = state.clock.now(); + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": token_string, "token_type_hint": "access_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(response.active); + + // Do the same request, but with the wrong 
token_type_hint + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({"token": token_string, "token_type_hint": "refresh_token"})); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active with wrong hint + + // Advance the clock to invalidate the access token + state.clock.advance(Duration::try_hours(2).unwrap()); + + let request = Request::post(OAuth2Introspection::PATH) + .basic_auth(&introspecting_client_id, &introspecting_client_secret) + .form(json!({ "token": token_string })); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let response: IntrospectionResponse = response.json(); + assert!(!response.active); // It shouldn't be active anymore + + state.activity_tracker.flush().await; + let mut repo = state.repository().await.unwrap(); + let session = repo + .personal_session() + .lookup(personal_session.id) + .await + .unwrap() + .unwrap(); + assert_eq!(session.last_active_at, Some(last_active)); + repo.save().await.unwrap(); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/keys.rs b/matrix-authentication-service/crates/handlers/src/oauth2/keys.rs new file mode 100644 index 00000000..04e4135e --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/keys.rs @@ -0,0 +1,14 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{Json, extract::State, response::IntoResponse}; +use mas_keystore::Keystore; + +#[tracing::instrument(name = "handlers.oauth2.keys.get", skip_all)] +pub(crate) async fn get(State(key_store): State) -> impl IntoResponse { + let jwks = key_store.public_jwks(); + Json(jwks) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/mod.rs b/matrix-authentication-service/crates/handlers/src/oauth2/mod.rs new file mode 100644 index 00000000..cf28818e --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/mod.rs @@ -0,0 +1,119 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use chrono::Duration; +use mas_data_model::{ + AccessToken, Authentication, AuthorizationGrant, BrowserSession, Client, Clock, RefreshToken, + Session, TokenType, +}; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_jose::{ + claims::{self, hash_token}, + constraints::Constrainable, + jwt::{JsonWebSignatureHeader, Jwt}, +}; +use mas_keystore::Keystore; +use mas_router::UrlBuilder; +use mas_storage::RepositoryAccess; +use thiserror::Error; + +pub mod authorization; +pub mod device; +pub mod discovery; +pub mod introspection; +pub mod keys; +pub mod registration; +pub mod revoke; +pub mod token; +pub mod userinfo; +pub mod webfinger; + +#[derive(Debug, Error)] +#[error(transparent)] +pub(crate) enum IdTokenSignatureError { + #[error("The signing key is invalid")] + InvalidSigningKey, + Claim(#[from] mas_jose::claims::ClaimError), + JwtSignature(#[from] mas_jose::jwt::JwtSignatureError), + WrongAlgorithm(#[from] mas_keystore::WrongAlgorithmError), + TokenHash(#[from] mas_jose::claims::TokenHashError), +} + +pub(crate) fn generate_id_token( + rng: &mut (impl rand::RngCore + rand::CryptoRng), + clock: &impl Clock, + 
url_builder: &UrlBuilder, + key_store: &Keystore, + client: &Client, + grant: Option<&AuthorizationGrant>, + browser_session: &BrowserSession, + access_token: Option<&AccessToken>, + last_authentication: Option<&Authentication>, +) -> Result { + let mut claims = HashMap::new(); + let now = clock.now(); + claims::ISS.insert(&mut claims, url_builder.oidc_issuer().to_string())?; + claims::SUB.insert(&mut claims, &browser_session.user.sub)?; + claims::AUD.insert(&mut claims, client.client_id.clone())?; + claims::IAT.insert(&mut claims, now)?; + claims::EXP.insert(&mut claims, now + Duration::try_hours(1).unwrap())?; + + if let Some(nonce) = grant.and_then(|grant| grant.nonce.as_ref()) { + claims::NONCE.insert(&mut claims, nonce)?; + } + + if let Some(last_authentication) = last_authentication { + claims::AUTH_TIME.insert(&mut claims, last_authentication.created_at)?; + } + + let alg = client + .id_token_signed_response_alg + .clone() + .unwrap_or(JsonWebSignatureAlg::Rs256); + let key = key_store + .signing_key_for_algorithm(&alg) + .ok_or(IdTokenSignatureError::InvalidSigningKey)?; + + if let Some(access_token) = access_token { + claims::AT_HASH.insert(&mut claims, hash_token(&alg, &access_token.access_token)?)?; + } + + if let Some(code) = grant.and_then(|grant| grant.code.as_ref()) { + claims::C_HASH.insert(&mut claims, hash_token(&alg, &code.code)?)?; + } + + let signer = key.params().signing_key_for_alg(&alg)?; + let header = JsonWebSignatureHeader::new(alg) + .with_kid(key.kid().ok_or(IdTokenSignatureError::InvalidSigningKey)?); + let id_token = Jwt::sign_with_rng(rng, header, claims, &signer)?; + + Ok(id_token.into_string()) +} + +pub(crate) async fn generate_token_pair( + rng: &mut (impl rand::RngCore + Send), + clock: &impl Clock, + repo: &mut R, + session: &Session, + ttl: Duration, +) -> Result<(AccessToken, RefreshToken), R::Error> { + let access_token_str = TokenType::AccessToken.generate(rng); + let refresh_token_str = 
TokenType::RefreshToken.generate(rng); + + let access_token = repo + .oauth2_access_token() + .add(rng, clock, session, access_token_str, Some(ttl)) + .await?; + + let refresh_token = repo + .oauth2_refresh_token() + .add(rng, clock, session, &access_token, refresh_token_str) + .await?; + + Ok((access_token, refresh_token)) +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/registration.rs b/matrix-authentication-service/crates/handlers/src/oauth2/registration.rs new file mode 100644 index 00000000..5ad58b38 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/registration.rs @@ -0,0 +1,619 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::LazyLock; + +use axum::{Json, extract::State, response::IntoResponse}; +use axum_extra::TypedHeader; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{BoxClock, BoxRng}; +use mas_iana::oauth::OAuthClientAuthenticationMethod; +use mas_keystore::Encrypter; +use mas_policy::{EvaluationResult, Policy}; +use mas_storage::{BoxRepository, oauth2::OAuth2ClientRepository}; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + registration::{ + ClientMetadata, ClientMetadataVerificationError, ClientRegistrationResponse, Localized, + VerifiedClientMetadata, + }, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use psl::Psl; +use rand::distributions::{Alphanumeric, DistString}; +use serde::Serialize; +use sha2::Digest as _; +use thiserror::Error; +use tracing::info; +use url::Url; + +use crate::{BoundActivityTracker, METER, impl_from_error_for_route}; + +static REGISTRATION_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.oauth2.registration_request") + .with_description("Number of OAuth2 registration 
requests") + .with_unit("{request}") + .build() +}); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error(transparent)] + JsonExtract(#[from] axum::extract::rejection::JsonRejection), + + #[error("invalid client metadata")] + InvalidClientMetadata(#[from] ClientMetadataVerificationError), + + #[error("{0} is a public suffix, not a valid domain")] + UrlIsPublicSuffix(&'static str), + + #[error("client registration denied by the policy: {0}")] + PolicyDenied(EvaluationResult), +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_policy::LoadError); +impl_from_error_for_route!(mas_policy::EvaluationError); +impl_from_error_for_route!(mas_keystore::aead::Error); +impl_from_error_for_route!(serde_json::Error); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + + REGISTRATION_COUNTER.add(1, &[KeyValue::new(RESULT, "denied")]); + + let response = match self { + Self::Internal(_) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ClientError::from(ClientErrorCode::ServerError)), + ) + .into_response(), + + // This error happens if we managed to parse the incoming JSON but it can't be + // deserialized to the expected type. In this case we return an + // `invalid_client_metadata` error with the details of the error. + Self::JsonExtract(axum::extract::rejection::JsonRejection::JsonDataError(e)) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidClientMetadata) + .with_description(e.to_string()), + ), + ) + .into_response(), + + // For all other JSON errors we return an `invalid_request` error, since this is + // probably due to a malformed request. 
+ Self::JsonExtract(_) => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidRequest)), + ) + .into_response(), + + // This error comes from the `ClientMetadata::validate` method. We return an + // `invalid_redirect_uri` error if the error is related to the redirect URIs, else we + // return an `invalid_client_metadata` error. + Self::InvalidClientMetadata( + ClientMetadataVerificationError::MissingRedirectUris + | ClientMetadataVerificationError::RedirectUriWithFragment(_), + ) => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidRedirectUri)), + ) + .into_response(), + + Self::InvalidClientMetadata(e) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidClientMetadata) + .with_description(e.to_string()), + ), + ) + .into_response(), + + // This error happens if any of the client's URIs are public suffixes. We return + // an `invalid_redirect_uri` error if it's a `redirect_uri`, else we return an + // `invalid_client_metadata` error. + Self::UrlIsPublicSuffix("redirect_uri") => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidRedirectUri) + .with_description("redirect_uri is not using a valid domain".to_owned()), + ), + ) + .into_response(), + + Self::UrlIsPublicSuffix(field) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidClientMetadata) + .with_description(format!("{field} is not using a valid domain")), + ), + ) + .into_response(), + + // For policy violations, we return an `invalid_client_metadata` error with the details + // of the violations in most cases. If a violation includes `redirect_uri` in the + // message, we return an `invalid_redirect_uri` error instead. 
+ Self::PolicyDenied(evaluation) => { + // TODO: detect them better + let code = if evaluation + .violations + .iter() + .any(|v| v.msg.contains("redirect_uri")) + { + ClientErrorCode::InvalidRedirectUri + } else { + ClientErrorCode::InvalidClientMetadata + }; + + let collected = &evaluation + .violations + .iter() + .map(|v| v.msg.clone()) + .collect::>(); + let joined = collected.join("; "); + + ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(code).with_description(joined)), + ) + .into_response() + } + }; + + (sentry_event_id, response).into_response() + } +} + +#[derive(Serialize)] +struct RouteResponse { + #[serde(flatten)] + response: ClientRegistrationResponse, + #[serde(flatten)] + metadata: VerifiedClientMetadata, +} + +/// Check if the host of the given URL is a public suffix +fn host_is_public_suffix(url: &Url) -> bool { + let host = url.host_str().unwrap_or_default().as_bytes(); + let Some(suffix) = psl::List.suffix(host) else { + // There is no suffix, which is the case for empty hosts, like with custom + // schemes + return false; + }; + + if !suffix.is_known() { + // The suffix is not known, so it's not a public suffix + return false; + } + + // We want to cover two cases: + // - The host is the suffix itself, like `com` + // - The host is a dot followed by the suffix, like `.com` + if host.len() <= suffix.as_bytes().len() + 1 { + // The host only has the suffix in it, so it's a public suffix + return true; + } + + false +} + +/// Check if any of the URLs in the given `Localized` field is a public suffix +fn localised_url_has_public_suffix(url: &Localized) -> bool { + url.iter().any(|(_lang, url)| host_is_public_suffix(url)) +} + +#[tracing::instrument(name = "handlers.oauth2.registration.post", skip_all)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + mut policy: Policy, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + State(encrypter): State, + body: Result, 
axum::extract::rejection::JsonRejection>, +) -> Result { + // Propagate any JSON extraction error + let Json(body) = body?; + + // Sort the properties to ensure a stable serialisation order for hashing + let body = body.sorted(); + + // We need to serialize the body to compute the hash, and to log it + let body_json = serde_json::to_string(&body)?; + + info!(body = body_json, "Client registration"); + + let user_agent = user_agent.map(|ua| ua.to_string()); + + // Validate the body + let metadata = body.validate()?; + + // Some extra validation that is hard to do in OPA and not done by the + // `validate` method either + if let Some(client_uri) = &metadata.client_uri + && localised_url_has_public_suffix(client_uri) + { + return Err(RouteError::UrlIsPublicSuffix("client_uri")); + } + + if let Some(logo_uri) = &metadata.logo_uri + && localised_url_has_public_suffix(logo_uri) + { + return Err(RouteError::UrlIsPublicSuffix("logo_uri")); + } + + if let Some(policy_uri) = &metadata.policy_uri + && localised_url_has_public_suffix(policy_uri) + { + return Err(RouteError::UrlIsPublicSuffix("policy_uri")); + } + + if let Some(tos_uri) = &metadata.tos_uri + && localised_url_has_public_suffix(tos_uri) + { + return Err(RouteError::UrlIsPublicSuffix("tos_uri")); + } + + if let Some(initiate_login_uri) = &metadata.initiate_login_uri + && host_is_public_suffix(initiate_login_uri) + { + return Err(RouteError::UrlIsPublicSuffix("initiate_login_uri")); + } + + for redirect_uri in metadata.redirect_uris() { + if host_is_public_suffix(redirect_uri) { + return Err(RouteError::UrlIsPublicSuffix("redirect_uri")); + } + } + + let res = policy + .evaluate_client_registration(mas_policy::ClientRegistrationInput { + client_metadata: &metadata, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent, + }, + }) + .await?; + if !res.valid() { + return Err(RouteError::PolicyDenied(res)); + } + + let (client_secret, encrypted_client_secret) = match 
metadata.token_endpoint_auth_method { + Some( + OAuthClientAuthenticationMethod::ClientSecretJwt + | OAuthClientAuthenticationMethod::ClientSecretPost + | OAuthClientAuthenticationMethod::ClientSecretBasic, + ) => { + // Let's generate a random client secret + let client_secret = Alphanumeric.sample_string(&mut rng, 20); + let encrypted_client_secret = encrypter.encrypt_to_string(client_secret.as_bytes())?; + (Some(client_secret), Some(encrypted_client_secret)) + } + _ => (None, None), + }; + + // If the client doesn't have a secret, we may be able to deduplicate it. To + // do so, we hash the client metadata, and look for it in the database + let (digest_hash, existing_client) = if client_secret.is_none() { + // XXX: One interesting caveat is that we hash *before* saving to the database. + // It means it takes into account fields that we don't care about *yet*. + // + // This means that if later we start supporting a particular field, we + // will still serve the 'old' client_id, without updating the client in the + // database + let hash = sha2::Sha256::digest(body_json); + let hash = hex::encode(hash); + let client = repo.oauth2_client().find_by_metadata_digest(&hash).await?; + (Some(hash), client) + } else { + (None, None) + }; + + let client = if let Some(client) = existing_client { + tracing::info!(%client.id, "Reusing existing client"); + REGISTRATION_COUNTER.add(1, &[KeyValue::new(RESULT, "reused")]); + client + } else { + let client = repo + .oauth2_client() + .add( + &mut rng, + &clock, + metadata.redirect_uris().to_vec(), + digest_hash, + encrypted_client_secret, + metadata.application_type.clone(), + //&metadata.response_types(), + metadata.grant_types().to_vec(), + metadata + .client_name + .clone() + .map(Localized::to_non_localized), + metadata.logo_uri.clone().map(Localized::to_non_localized), + metadata.client_uri.clone().map(Localized::to_non_localized), + metadata.policy_uri.clone().map(Localized::to_non_localized), + 
metadata.tos_uri.clone().map(Localized::to_non_localized), + metadata.jwks_uri.clone(), + metadata.jwks.clone(), + // XXX: those might not be right, should be function calls + metadata.id_token_signed_response_alg.clone(), + metadata.userinfo_signed_response_alg.clone(), + metadata.token_endpoint_auth_method.clone(), + metadata.token_endpoint_auth_signing_alg.clone(), + metadata.initiate_login_uri.clone(), + ) + .await?; + tracing::info!(%client.id, "Registered new client"); + REGISTRATION_COUNTER.add(1, &[KeyValue::new(RESULT, "created")]); + client + }; + + let response = ClientRegistrationResponse { + client_id: client.client_id.clone(), + client_secret, + // XXX: we should have a `created_at` field on the clients + client_id_issued_at: Some(client.id.datetime().into()), + client_secret_expires_at: None, + }; + + // We round-trip back to the metadata to output it in the response + // This should never fail, as the client is valid + let metadata = client.into_metadata().validate()?; + + repo.save().await?; + + let response = RouteResponse { response, metadata }; + + Ok((StatusCode::CREATED, Json(response))) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode}; + use mas_router::SimpleRoute; + use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + registration::ClientRegistrationResponse, + }; + use sqlx::PgPool; + use url::Url; + + use crate::{ + oauth2::registration::host_is_public_suffix, + test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}, + }; + + #[test] + fn test_public_suffix_list() { + fn url_is_public_suffix(url: &str) -> bool { + host_is_public_suffix(&Url::parse(url).unwrap()) + } + + assert!(url_is_public_suffix("https://.com")); + assert!(url_is_public_suffix("https://.com.")); + assert!(url_is_public_suffix("https://co.uk")); + assert!(url_is_public_suffix("https://github.io")); + assert!(!url_is_public_suffix("https://example.com")); + assert!(!url_is_public_suffix("https://example.com.")); + 
assert!(!url_is_public_suffix("https://x.com")); + assert!(!url_is_public_suffix("https://x.com.")); + assert!(!url_is_public_suffix("https://matrix-org.github.io")); + assert!(!url_is_public_suffix("http://localhost")); + assert!(!url_is_public_suffix("org.matrix:/callback")); + assert!(!url_is_public_suffix("http://somerandominternaldomain")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_registration_error(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Body is not a JSON + let request = Request::post(mas_router::OAuth2RegistrationEndpoint::PATH) + .body("this is not a json".to_owned()) + .unwrap(); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidRequest); + + // Invalid client metadata + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "this is not a uri", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidClientMetadata); + + // Invalid redirect URI + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "application_type": "web", + "client_uri": "https://example.com/", + "redirect_uris": ["http://this-is-insecure.com/"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidRedirectUri); + + // Incoherent response types + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/"], + 
"response_types": ["id_token"], + "grant_types": ["authorization_code"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidClientMetadata); + + // Using a public suffix + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://github.io/", + "redirect_uris": ["https://github.io/"], + "response_types": ["code"], + "grant_types": ["authorization_code"], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidClientMetadata); + assert_eq!( + response.error_description.unwrap(), + "client_uri is not using a valid domain" + ); + + // Using a public suffix in a translated URL + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "client_uri#fr-FR": "https://github.io/", + "redirect_uris": ["https://example.com/"], + "response_types": ["code"], + "grant_types": ["authorization_code"], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let response: ClientError = response.json(); + assert_eq!(response.error, ClientErrorCode::InvalidClientMetadata); + assert_eq!( + response.error_description.unwrap(), + "client_uri is not using a valid domain" + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_registration(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // A successful registration with no authentication should not return a client + // secret + let request = + 
Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/"], + "response_types": ["code"], + "grant_types": ["authorization_code"], + "token_endpoint_auth_method": "none", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + assert!(response.client_secret.is_none()); + + // A successful registration with client_secret based authentication should + // return a client secret + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/"], + "response_types": ["code"], + "grant_types": ["authorization_code"], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + assert!(response.client_secret.is_some()); + } + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_registration_dedupe(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Post a client registration twice, we should get the same client ID + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "client_name": "Example", + "client_name#en": "Example", + "client_name#fr": "Exemple", + "client_name#de": "Beispiel", + "redirect_uris": ["https://example.com/", "https://example.com/callback"], + "response_types": ["code"], + "grant_types": ["authorization_code", "urn:ietf:params:oauth:grant-type:device_code"], + "token_endpoint_auth_method": "none", + })); + + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::CREATED); + let 
response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + assert_eq!(response.client_id, client_id); + + // Check that the order of some properties doesn't matter + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "client_name": "Example", + "client_name#de": "Beispiel", + "client_name#fr": "Exemple", + "client_name#en": "Example", + "redirect_uris": ["https://example.com/callback", "https://example.com/"], + "response_types": ["code"], + "grant_types": ["urn:ietf:params:oauth:grant-type:device_code", "authorization_code"], + "token_endpoint_auth_method": "none", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + assert_eq!(response.client_id, client_id); + + // Doing that with a client that has a client_secret should not deduplicate + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/"], + "response_types": ["code"], + "grant_types": ["authorization_code"], + "token_endpoint_auth_method": "client_secret_basic", + })); + + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + // Sanity check that the client_id is different + assert_ne!(response.client_id, client_id); + let client_id = response.client_id; + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + assert_ne!(response.client_id, client_id); + } +} diff --git 
a/matrix-authentication-service/crates/handlers/src/oauth2/revoke.rs b/matrix-authentication-service/crates/handlers/src/oauth2/revoke.rs new file mode 100644 index 00000000..fa11216d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/revoke.rs @@ -0,0 +1,469 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{Json, extract::State, response::IntoResponse}; +use hyper::StatusCode; +use mas_axum_utils::{ + client_authorization::{ClientAuthorization, CredentialsVerificationError}, + record_error, +}; +use mas_data_model::{BoxClock, BoxRng, TokenType}; +use mas_iana::oauth::OAuthTokenTypeHint; +use mas_keystore::Encrypter; +use mas_storage::{ + BoxRepository, RepositoryAccess, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, +}; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + requests::RevocationRequest, +}; +use thiserror::Error; +use ulid::Ulid; + +use crate::{BoundActivityTracker, impl_from_error_for_route}; + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error(transparent)] + Internal(Box), + + #[error("bad request")] + BadRequest, + + #[error("client not found")] + ClientNotFound, + + #[error("client not allowed")] + ClientNotAllowed, + + #[error("invalid client credentials for client {client_id}")] + InvalidClientCredentials { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, + + #[error("could not verify client credentials for client {client_id}")] + ClientCredentialsVerification { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, + + #[error("client is unauthorized")] + UnauthorizedClient, + + #[error("unsupported token type")] + UnsupportedTokenType, + + #[error("unknown token")] + UnknownToken, +} + +impl IntoResponse for RouteError { + 
fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + let response = match self { + Self::Internal(_) | Self::ClientCredentialsVerification { .. } => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ClientError::from(ClientErrorCode::ServerError)), + ) + .into_response(), + + Self::BadRequest => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidRequest)), + ) + .into_response(), + + Self::ClientNotFound | Self::InvalidClientCredentials { .. } => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::InvalidClient)), + ) + .into_response(), + + Self::ClientNotAllowed | Self::UnauthorizedClient => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::UnauthorizedClient)), + ) + .into_response(), + + Self::UnsupportedTokenType => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::UnsupportedTokenType)), + ) + .into_response(), + + // If the token is unknown, we still return a 200 OK response. + Self::UnknownToken => StatusCode::OK.into_response(), + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl From for RouteError { + fn from(_e: mas_data_model::TokenFormatError) -> Self { + Self::UnknownToken + } +} + +#[tracing::instrument( + name = "handlers.oauth2.revoke.post", + fields(client.id = client_authorization.client_id()), + skip_all, +)] +pub(crate) async fn post( + clock: BoxClock, + mut rng: BoxRng, + State(http_client): State, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + State(encrypter): State, + client_authorization: ClientAuthorization, +) -> Result { + let client = client_authorization + .credentials + .fetch(&mut repo) + .await? 
+ .ok_or(RouteError::ClientNotFound)?; + + let method = client + .token_endpoint_auth_method + .as_ref() + .ok_or(RouteError::ClientNotAllowed)?; + + client_authorization + .credentials + .verify(&http_client, &encrypter, method, &client) + .await + .map_err(|err| { + if err.is_internal() { + RouteError::ClientCredentialsVerification { + client_id: client.id, + source: err, + } + } else { + RouteError::InvalidClientCredentials { + client_id: client.id, + source: err, + } + } + })?; + + let Some(form) = client_authorization.form else { + return Err(RouteError::BadRequest); + }; + + let token_type = TokenType::check(&form.token)?; + + // Find the ID of the session to end. + let session_id = match (form.token_type_hint, token_type) { + (Some(OAuthTokenTypeHint::AccessToken) | None, TokenType::AccessToken) => { + let access_token = repo + .oauth2_access_token() + .find_by_token(&form.token) + .await? + .ok_or(RouteError::UnknownToken)?; + + if !access_token.is_valid(clock.now()) { + return Err(RouteError::UnknownToken); + } + access_token.session_id + } + + (Some(OAuthTokenTypeHint::RefreshToken) | None, TokenType::RefreshToken) => { + let refresh_token = repo + .oauth2_refresh_token() + .find_by_token(&form.token) + .await? + .ok_or(RouteError::UnknownToken)?; + + if !refresh_token.is_valid() { + return Err(RouteError::UnknownToken); + } + + refresh_token.session_id + } + + // This case can happen if there is a mismatch between the token type hint and the guessed + // token type or if the token was a compat access/refresh token. In those cases, we return + // an unknown token error. + (Some(OAuthTokenTypeHint::AccessToken | OAuthTokenTypeHint::RefreshToken) | None, _) => { + return Err(RouteError::UnknownToken); + } + + (Some(_), _) => return Err(RouteError::UnsupportedTokenType), + }; + + let session = repo + .oauth2_session() + .lookup(session_id) + .await? + .ok_or(RouteError::UnknownToken)?; + + // Check that the session is still valid. 
+ if !session.is_valid() { + return Err(RouteError::UnknownToken); + } + + // Check that the client ending the session is the same as the client that + // created it. + if client.id != session.client_id { + return Err(RouteError::UnauthorizedClient); + } + + activity_tracker + .record_oauth2_session(&clock, &session) + .await; + + // If the session is associated with a user, make sure we schedule a device + // deletion job for all the devices associated with the session. + if let Some(user_id) = session.user_id { + // Fetch the user + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UnknownToken)?; + + // Schedule a job to sync the devices of the user with the homeserver + repo.queue_job() + .schedule_job(&mut rng, &clock, SyncDevicesJob::new(&user)) + .await?; + } + + // Now that we checked everything, we can end the session. + repo.oauth2_session().finish(&clock, session).await?; + + repo.save().await?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use hyper::Request; + use mas_data_model::{AccessToken, RefreshToken}; + use mas_router::SimpleRoute; + use mas_storage::RepositoryAccess; + use oauth2_types::{ + registration::ClientRegistrationResponse, + requests::AccessTokenResponse, + scope::{OPENID, Scope}, + }; + use sqlx::PgPool; + + use super::*; + use crate::{ + oauth2::generate_token_pair, + test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}, + }; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_revoke_access_token(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/callback"], + "token_endpoint_auth_method": "client_secret_post", + "response_types": ["code"], + "grant_types": ["authorization_code", "refresh_token"], + })); + + let response = 
state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let client_registration: ClientRegistrationResponse = response.json(); + + let client_id = client_registration.client_id; + let client_secret = client_registration.client_secret.unwrap(); + + // Let's provision a user and create a session for them. This part is hard to + // test with just HTTP requests, so we'll use the repository directly. + let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + // Lookup the client in the database. + let client = repo + .oauth2_client() + .find_by_client_id(&client_id) + .await + .unwrap() + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + let (AccessToken { access_token, .. }, RefreshToken { refresh_token, .. }) = + generate_token_pair( + &mut state.rng(), + &state.clock, + &mut repo, + &session, + Duration::microseconds(5 * 60 * 1000 * 1000), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Check that the token is valid + assert!(state.is_access_token_valid(&access_token).await); + + // Now let's revoke the access token. 
+ let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": access_token, + "token_type_hint": "access_token", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + // Check that the token is no longer valid + assert!(!state.is_access_token_valid(&access_token).await); + + // Revoking a second time shouldn't fail + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": access_token, + "token_type_hint": "access_token", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + // Try using the refresh token to get a new access token, it should fail. + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + + // Now try with a new grant, and by revoking the refresh token instead + let mut repo = state.repository().await.unwrap(); + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + let (AccessToken { access_token, .. }, RefreshToken { refresh_token, .. }) = + generate_token_pair( + &mut state.rng(), + &state.clock, + &mut repo, + &session, + Duration::microseconds(5 * 60 * 1000 * 1000), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Use the refresh token to get a new access token. 
+ let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let old_access_token = access_token; + let old_refresh_token = refresh_token; + let AccessTokenResponse { + access_token, + refresh_token, + .. + } = response.json(); + assert!(state.is_access_token_valid(&access_token).await); + assert!(!state.is_access_token_valid(&old_access_token).await); + + // Revoking the old access token shouldn't do anything. + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": old_access_token, + "token_type_hint": "access_token", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + assert!(state.is_access_token_valid(&access_token).await); + + // Revoking the old refresh token shouldn't do anything. 
+ let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": old_refresh_token, + "token_type_hint": "refresh_token", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + assert!(state.is_access_token_valid(&access_token).await); + + // Revoking the new refresh token should invalidate the session + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": refresh_token, + "token_type_hint": "refresh_token", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + assert!(!state.is_access_token_valid(&access_token).await); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/token.rs b/matrix-authentication-service/crates/handlers/src/oauth2/token.rs new file mode 100644 index 00000000..696c7d42 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/token.rs @@ -0,0 +1,1919 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::{Arc, LazyLock}; + +use axum::{Json, extract::State, response::IntoResponse}; +use axum_extra::typed_header::TypedHeader; +use chrono::Duration; +use headers::{CacheControl, HeaderMap, HeaderMapExt, Pragma}; +use hyper::StatusCode; +use mas_axum_utils::{ + client_authorization::{ClientAuthorization, CredentialsVerificationError}, + record_error, +}; +use mas_data_model::{ + AuthorizationGrantStage, BoxClock, BoxRng, Client, Clock, Device, DeviceCodeGrantState, + SiteConfig, TokenType, +}; +use mas_i18n::DataLocale; +use mas_keystore::{Encrypter, Keystore}; +use mas_matrix::HomeserverConnection; +use mas_oidc_client::types::scope::ScopeToken; +use mas_policy::Policy; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, RepositoryAccess, + oauth2::{ + OAuth2AccessTokenRepository, OAuth2AuthorizationGrantRepository, + OAuth2RefreshTokenRepository, OAuth2SessionRepository, + }, + user::BrowserSessionRepository, +}; +use mas_templates::{DeviceNameContext, TemplateContext, Templates}; +use oauth2_types::{ + errors::{ClientError, ClientErrorCode}, + pkce::CodeChallengeError, + requests::{ + AccessTokenRequest, AccessTokenResponse, AuthorizationCodeGrant, ClientCredentialsGrant, + DeviceCodeGrant, GrantType, RefreshTokenGrant, + }, + scope, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use thiserror::Error; +use tracing::{debug, info, warn}; +use ulid::Ulid; + +use super::{generate_id_token, generate_token_pair}; +use crate::{BoundActivityTracker, METER, impl_from_error_for_route}; + +static TOKEN_REQUEST_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.oauth2.token_request") + .with_description("How many OAuth 2.0 token requests have gone through") + .with_unit("{request}") + .build() +}); +const GRANT_TYPE: Key = Key::from_static_str("grant_type"); +const RESULT: Key = Key::from_static_str("successful"); + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error(transparent)] + Internal(Box), + + 
#[error("bad request")] + BadRequest, + + #[error("pkce verification failed")] + PkceVerification(#[from] CodeChallengeError), + + #[error("client not found")] + ClientNotFound, + + #[error("client not allowed to use the token endpoint: {0}")] + ClientNotAllowed(Ulid), + + #[error("invalid client credentials for client {client_id}")] + InvalidClientCredentials { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, + + #[error("could not verify client credentials for client {client_id}")] + ClientCredentialsVerification { + client_id: Ulid, + #[source] + source: CredentialsVerificationError, + }, + + #[error("grant not found")] + GrantNotFound, + + #[error("invalid grant {0}")] + InvalidGrant(Ulid), + + #[error("refresh token not found")] + RefreshTokenNotFound, + + #[error("refresh token {0} is invalid")] + RefreshTokenInvalid(Ulid), + + #[error("session {0} is invalid")] + SessionInvalid(Ulid), + + #[error("client id mismatch: expected {expected}, got {actual}")] + ClientIDMismatch { expected: Ulid, actual: Ulid }, + + #[error("policy denied the request: {0}")] + DeniedByPolicy(mas_policy::EvaluationResult), + + #[error("unsupported grant type")] + UnsupportedGrantType, + + #[error("client {0} is not authorized to use this grant type")] + UnauthorizedClient(Ulid), + + #[error("unexpected client {was} (expected {expected})")] + UnexptectedClient { was: Ulid, expected: Ulid }, + + #[error("failed to load browser session {0}")] + NoSuchBrowserSession(Ulid), + + #[error("failed to load oauth session {0}")] + NoSuchOAuthSession(Ulid), + + #[error( + "failed to load the next refresh token ({next:?}) from the previous one ({previous:?})" + )] + NoSuchNextRefreshToken { next: Ulid, previous: Ulid }, + + #[error( + "failed to load the access token ({access_token:?}) associated with the next refresh token ({refresh_token:?})" + )] + NoSuchNextAccessToken { + access_token: Ulid, + refresh_token: Ulid, + }, + + #[error("device code grant expired")] + 
DeviceCodeExpired, + + #[error("device code grant is still pending")] + DeviceCodePending, + + #[error("device code grant was rejected")] + DeviceCodeRejected, + + #[error("device code grant was already exchanged")] + DeviceCodeExchanged, + + #[error("failed to provision device")] + ProvisionDeviceFailed(#[source] anyhow::Error), +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!( + self, + Self::Internal(_) + | Self::ClientCredentialsVerification { .. } + | Self::NoSuchBrowserSession(_) + | Self::NoSuchOAuthSession(_) + | Self::ProvisionDeviceFailed(_) + | Self::NoSuchNextRefreshToken { .. } + | Self::NoSuchNextAccessToken { .. } + ); + + TOKEN_REQUEST_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + + let response = match self { + Self::Internal(_) + | Self::ClientCredentialsVerification { .. } + | Self::NoSuchBrowserSession(_) + | Self::NoSuchOAuthSession(_) + | Self::ProvisionDeviceFailed(_) + | Self::NoSuchNextRefreshToken { .. } + | Self::NoSuchNextAccessToken { .. } => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ClientError::from(ClientErrorCode::ServerError)), + ), + + Self::BadRequest => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidRequest)), + ), + + Self::PkceVerification(err) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidGrant) + .with_description(format!("PKCE verification failed: {err}")), + ), + ), + + Self::ClientNotFound | Self::InvalidClientCredentials { .. } => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::InvalidClient)), + ), + + Self::ClientNotAllowed(_) + | Self::UnauthorizedClient(_) + | Self::UnexptectedClient { .. 
} => ( + StatusCode::UNAUTHORIZED, + Json(ClientError::from(ClientErrorCode::UnauthorizedClient)), + ), + + Self::DeniedByPolicy(evaluation) => ( + StatusCode::FORBIDDEN, + Json( + ClientError::from(ClientErrorCode::InvalidScope).with_description( + evaluation + .violations + .into_iter() + .map(|violation| violation.msg) + .collect::>() + .join(", "), + ), + ), + ), + + Self::DeviceCodeRejected => ( + StatusCode::FORBIDDEN, + Json(ClientError::from(ClientErrorCode::AccessDenied)), + ), + + Self::DeviceCodeExpired => ( + StatusCode::FORBIDDEN, + Json(ClientError::from(ClientErrorCode::ExpiredToken)), + ), + + Self::DeviceCodePending => ( + StatusCode::FORBIDDEN, + Json(ClientError::from(ClientErrorCode::AuthorizationPending)), + ), + + Self::InvalidGrant(_) + | Self::DeviceCodeExchanged + | Self::RefreshTokenNotFound + | Self::RefreshTokenInvalid(_) + | Self::SessionInvalid(_) + | Self::ClientIDMismatch { .. } + | Self::GrantNotFound => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::InvalidGrant)), + ), + + Self::UnsupportedGrantType => ( + StatusCode::BAD_REQUEST, + Json(ClientError::from(ClientErrorCode::UnsupportedGrantType)), + ), + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_i18n::DataError); +impl_from_error_for_route!(mas_templates::TemplateError); +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_policy::EvaluationError); +impl_from_error_for_route!(super::IdTokenSignatureError); + +#[tracing::instrument( + name = "handlers.oauth2.token.post", + fields(client.id = client_authorization.client_id()), + skip_all, +)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + State(http_client): State, + State(key_store): State, + State(url_builder): State, + activity_tracker: BoundActivityTracker, + mut repo: BoxRepository, + State(homeserver): State>, + State(site_config): State, + State(encrypter): State, + State(templates): State, + 
policy: Policy, + user_agent: Option>, + client_authorization: ClientAuthorization, +) -> Result { + let user_agent = user_agent.map(|ua| ua.as_str().to_owned()); + let client = client_authorization + .credentials + .fetch(&mut repo) + .await? + .ok_or(RouteError::ClientNotFound)?; + + let method = client + .token_endpoint_auth_method + .as_ref() + .ok_or(RouteError::ClientNotAllowed(client.id))?; + + client_authorization + .credentials + .verify(&http_client, &encrypter, method, &client) + .await + .map_err(|err| { + // Classify the error differently, depending on whether it's an 'internal' error, + // or just because the client presented invalid credentials. + if err.is_internal() { + RouteError::ClientCredentialsVerification { + client_id: client.id, + source: err, + } + } else { + RouteError::InvalidClientCredentials { + client_id: client.id, + source: err, + } + } + })?; + + let form = client_authorization.form.ok_or(RouteError::BadRequest)?; + + let grant_type = form.grant_type(); + + let (reply, repo) = match form { + AccessTokenRequest::AuthorizationCode(grant) => { + authorization_code_grant( + &mut rng, + &clock, + &activity_tracker, + &grant, + &client, + &key_store, + &url_builder, + &site_config, + repo, + &homeserver, + &templates, + user_agent, + ) + .await? + } + AccessTokenRequest::RefreshToken(grant) => { + refresh_token_grant( + &mut rng, + &clock, + &activity_tracker, + &grant, + &client, + &site_config, + repo, + user_agent, + ) + .await? + } + AccessTokenRequest::ClientCredentials(grant) => { + client_credentials_grant( + &mut rng, + &clock, + &activity_tracker, + &grant, + &client, + &site_config, + repo, + policy, + user_agent, + ) + .await? + } + AccessTokenRequest::DeviceCode(grant) => { + device_code_grant( + &mut rng, + &clock, + &activity_tracker, + &grant, + &client, + &key_store, + &url_builder, + &site_config, + repo, + &homeserver, + user_agent, + ) + .await?
+ } + _ => { + return Err(RouteError::UnsupportedGrantType); + } + }; + + repo.save().await?; + + TOKEN_REQUEST_COUNTER.add( + 1, + &[ + KeyValue::new(GRANT_TYPE, grant_type), + KeyValue::new(RESULT, "success"), + ], + ); + + let mut headers = HeaderMap::new(); + headers.typed_insert(CacheControl::new().with_no_store()); + headers.typed_insert(Pragma::no_cache()); + + Ok((headers, Json(reply))) +} + +async fn authorization_code_grant( + mut rng: &mut BoxRng, + clock: &impl Clock, + activity_tracker: &BoundActivityTracker, + grant: &AuthorizationCodeGrant, + client: &Client, + key_store: &Keystore, + url_builder: &UrlBuilder, + site_config: &SiteConfig, + mut repo: BoxRepository, + homeserver: &Arc, + templates: &Templates, + user_agent: Option, +) -> Result<(AccessTokenResponse, BoxRepository), RouteError> { + // Check that the client is allowed to use this grant type + if !client.grant_types.contains(&GrantType::AuthorizationCode) { + return Err(RouteError::UnauthorizedClient(client.id)); + } + + let authz_grant = repo + .oauth2_authorization_grant() + .find_by_code(&grant.code) + .await? + .ok_or(RouteError::GrantNotFound)?; + + let now = clock.now(); + + let session_id = match authz_grant.stage { + AuthorizationGrantStage::Cancelled { cancelled_at } => { + debug!(%cancelled_at, "Authorization grant was cancelled"); + return Err(RouteError::InvalidGrant(authz_grant.id)); + } + AuthorizationGrantStage::Exchanged { + exchanged_at, + fulfilled_at, + session_id, + } => { + warn!(%exchanged_at, %fulfilled_at, "Authorization code was already exchanged"); + + // Ending the session if the token was already exchanged more than 20s ago + if now - exchanged_at > Duration::microseconds(20 * 1000 * 1000) { + warn!(oauth_session.id = %session_id, "Ending potentially compromised session"); + let session = repo + .oauth2_session() + .lookup(session_id) + .await? 
+ .ok_or(RouteError::NoSuchOAuthSession(session_id))?; + + //if !session.is_finished() { + repo.oauth2_session().finish(clock, session).await?; + repo.save().await?; + //} + } + + return Err(RouteError::InvalidGrant(authz_grant.id)); + } + AuthorizationGrantStage::Pending => { + warn!("Authorization grant has not been fulfilled yet"); + return Err(RouteError::InvalidGrant(authz_grant.id)); + } + AuthorizationGrantStage::Fulfilled { + session_id, + fulfilled_at, + } => { + if now - fulfilled_at > Duration::microseconds(10 * 60 * 1000 * 1000) { + warn!("Code exchange took more than 10 minutes"); + return Err(RouteError::InvalidGrant(authz_grant.id)); + } + + session_id + } + }; + + let mut session = repo + .oauth2_session() + .lookup(session_id) + .await? + .ok_or(RouteError::NoSuchOAuthSession(session_id))?; + + // Generate a device name + let lang: DataLocale = authz_grant.locale.as_deref().unwrap_or("en").parse()?; + let ctx = DeviceNameContext::new(client.clone(), user_agent.clone()).with_language(lang); + let device_name = templates.render_device_name(&ctx)?; + + if let Some(user_agent) = user_agent { + session = repo + .oauth2_session() + .record_user_agent(session, user_agent) + .await?; + } + + // This should never happen, since we looked up in the database using the code + let code = authz_grant + .code + .as_ref() + .ok_or(RouteError::InvalidGrant(authz_grant.id))?; + + if client.id != session.client_id { + return Err(RouteError::UnexptectedClient { + was: client.id, + expected: session.client_id, + }); + } + + match (code.pkce.as_ref(), grant.code_verifier.as_ref()) { + (None, None) => {} + // We have a challenge but no verifier (or vice-versa)? Bad request. 
+ (Some(_), None) | (None, Some(_)) => return Err(RouteError::BadRequest), + // If we have both, we need to check the code validity + (Some(pkce), Some(verifier)) => { + pkce.verify(verifier)?; + } + } + + let Some(user_session_id) = session.user_session_id else { + tracing::warn!("No user session associated with this OAuth2 session"); + return Err(RouteError::InvalidGrant(authz_grant.id)); + }; + + let browser_session = repo + .browser_session() + .lookup(user_session_id) + .await? + .ok_or(RouteError::NoSuchBrowserSession(user_session_id))?; + + let last_authentication = repo + .browser_session() + .get_last_authentication(&browser_session) + .await?; + + let ttl = site_config.access_token_ttl; + let (access_token, refresh_token) = + generate_token_pair(&mut rng, clock, &mut repo, &session, ttl).await?; + + let id_token = if session.scope.contains(&scope::OPENID) { + Some(generate_id_token( + &mut rng, + clock, + url_builder, + key_store, + client, + Some(&authz_grant), + &browser_session, + Some(&access_token), + last_authentication.as_ref(), + )?) 
+ } else { + None + }; + + let mut params = AccessTokenResponse::new(access_token.access_token) + .with_expires_in(ttl) + .with_refresh_token(refresh_token.refresh_token) + .with_scope(session.scope.clone()); + + if let Some(id_token) = id_token { + params = params.with_id_token(id_token); + } + + // Lock the user sync to make sure we don't get into a race condition + repo.user() + .acquire_lock_for_sync(&browser_session.user) + .await?; + + // Look for device to provision + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + homeserver + .upsert_device( + &browser_session.user.username, + device.as_str(), + Some(&device_name), + ) + .await + .map_err(RouteError::ProvisionDeviceFailed)?; + } + } + + repo.oauth2_authorization_grant() + .exchange(clock, authz_grant) + .await?; + + // XXX: there is a potential (but unlikely) race here, where the activity for + // the session is recorded before the transaction is committed. We would have to + // save the repository here to fix that. + activity_tracker + .record_oauth2_session(clock, &session) + .await; + + Ok((params, repo)) +} + +async fn refresh_token_grant( + rng: &mut BoxRng, + clock: &impl Clock, + activity_tracker: &BoundActivityTracker, + grant: &RefreshTokenGrant, + client: &Client, + site_config: &SiteConfig, + mut repo: BoxRepository, + user_agent: Option, +) -> Result<(AccessTokenResponse, BoxRepository), RouteError> { + // Check that the client is allowed to use this grant type + if !client.grant_types.contains(&GrantType::RefreshToken) { + return Err(RouteError::UnauthorizedClient(client.id)); + } + + let refresh_token = repo + .oauth2_refresh_token() + .find_by_token(&grant.refresh_token) + .await? + .ok_or(RouteError::RefreshTokenNotFound)?; + + let mut session = repo + .oauth2_session() + .lookup(refresh_token.session_id) + .await? 
+ .ok_or(RouteError::NoSuchOAuthSession(refresh_token.session_id))?; + + // Let's for now record the user agent on each refresh, that should be + // responsive enough and not too much of a burden on the database. + if let Some(user_agent) = user_agent { + session = repo + .oauth2_session() + .record_user_agent(session, user_agent) + .await?; + } + + if !session.is_valid() { + return Err(RouteError::SessionInvalid(session.id)); + } + + if client.id != session.client_id { + // As per https://datatracker.ietf.org/doc/html/rfc6749#section-5.2 + return Err(RouteError::ClientIDMismatch { + expected: session.client_id, + actual: client.id, + }); + } + + if !refresh_token.is_valid() { + // We're seing a refresh token that already has been consumed, this might be a + // double-refresh or a replay attack + + // First, get the next refresh token + let Some(next_refresh_token_id) = refresh_token.next_refresh_token_id() else { + // If we don't have a 'next' refresh token, it may just be because this was + // before we were recording those. Let's just treat it as a replay. + return Err(RouteError::RefreshTokenInvalid(refresh_token.id)); + }; + + let Some(next_refresh_token) = repo + .oauth2_refresh_token() + .lookup(next_refresh_token_id) + .await? + else { + return Err(RouteError::NoSuchNextRefreshToken { + next: next_refresh_token_id, + previous: refresh_token.id, + }); + }; + + // Check if the next refresh token was already consumed or not + if !next_refresh_token.is_valid() { + // XXX: This is a replay, we *may* want to invalidate the session + return Err(RouteError::RefreshTokenInvalid(next_refresh_token.id)); + } + + // Check if the associated access token was already used. + // + // If the access token is no longer present, we assume it was *not* used. 
+ // Tokens can disappear for two main reasons: + // + // - revoked access tokens are deleted after 1 hour + // - expired access tokens are deleted after 30 days + // + // Revoked tokens are not an issue, as the associated refresh token is also + // revoked. For expired tokens, however, we are effectively losing the + // ability to prevent the client from performing a bad double-refresh. + // This measure is intended to enhance security when a refresh token + // leaks. However, the primary goal is to ensure that we do not maintain + // two active branches of the refresh token tree. + // + // Consider these two scenarios: + // + // - Refresh token A is consumed, issuing refresh token B and access token C. + // - The client uses access token C. + // - Access token C expires after some time. + // - If the client then attempts to use refresh token A again: + // - If access token C is still present, the refresh will be rightfully + // declined, as we have proof that it received the new set of tokens. + // - If access token C was cleaned up, the refresh will succeed, issuing + // new tokens but invalidating refresh token B and the original access + // token C. + if let Some(access_token_id) = next_refresh_token.access_token_id { + // Load it + let next_access_token = repo + .oauth2_access_token() + .lookup(access_token_id) + .await? + .ok_or(RouteError::NoSuchNextAccessToken { + access_token: access_token_id, + refresh_token: next_refresh_token_id, + })?; + + if next_access_token.is_used() { + // XXX: This is a replay, we *may* want to invalidate the session + return Err(RouteError::RefreshTokenInvalid(next_refresh_token.id)); + } + + // This could be a double-refresh, see below + repo.oauth2_access_token() + .revoke(clock, next_access_token) + .await?; + } + + // Looks like it's a double-refresh, client lost their refresh token on + // the way back. 
Let's revoke the unused access and refresh tokens, and + // issue new ones + info!( + oauth2_session.id = %session.id, + oauth2_client.id = %client.id, + %refresh_token.id, + "Refresh token already used, but issued refresh and access tokens are unused. Assuming those were lost; revoking those and reissuing new ones." + ); + + repo.oauth2_refresh_token() + .revoke(clock, next_refresh_token) + .await?; + } + + activity_tracker + .record_oauth2_session(clock, &session) + .await; + + let ttl = site_config.access_token_ttl; + let (new_access_token, new_refresh_token) = + generate_token_pair(rng, clock, &mut repo, &session, ttl).await?; + + let refresh_token = repo + .oauth2_refresh_token() + .consume(clock, refresh_token, &new_refresh_token) + .await?; + + if let Some(access_token_id) = refresh_token.access_token_id { + let access_token = repo.oauth2_access_token().lookup(access_token_id).await?; + if let Some(access_token) = access_token { + // If it is a double-refresh, it might already be revoked + if !access_token.state.is_revoked() { + repo.oauth2_access_token() + .revoke(clock, access_token) + .await?; + } + } + } + + let params = AccessTokenResponse::new(new_access_token.access_token) + .with_expires_in(ttl) + .with_refresh_token(new_refresh_token.refresh_token) + .with_scope(session.scope); + + Ok((params, repo)) +} + +async fn client_credentials_grant( + rng: &mut BoxRng, + clock: &impl Clock, + activity_tracker: &BoundActivityTracker, + grant: &ClientCredentialsGrant, + client: &Client, + site_config: &SiteConfig, + mut repo: BoxRepository, + mut policy: Policy, + user_agent: Option, +) -> Result<(AccessTokenResponse, BoxRepository), RouteError> { + // Check that the client is allowed to use this grant type + if !client.grant_types.contains(&GrantType::ClientCredentials) { + return Err(RouteError::UnauthorizedClient(client.id)); + } + + // Default to an empty scope if none is provided + let scope = grant + .scope + .clone() + .unwrap_or_else(|| 
std::iter::empty::().collect()); + + // Make the request go through the policy engine + let res = policy + .evaluate_authorization_grant(mas_policy::AuthorizationGrantInput { + user: None, + client, + session_counts: None, + scope: &scope, + grant_type: mas_policy::GrantType::ClientCredentials, + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent: user_agent.clone(), + }, + }) + .await?; + if !res.valid() { + return Err(RouteError::DeniedByPolicy(res)); + } + + // Start the session + let mut session = repo + .oauth2_session() + .add_from_client_credentials(rng, clock, client, scope) + .await?; + + if let Some(user_agent) = user_agent { + session = repo + .oauth2_session() + .record_user_agent(session, user_agent) + .await?; + } + + let ttl = site_config.access_token_ttl; + let access_token_str = TokenType::AccessToken.generate(rng); + + let access_token = repo + .oauth2_access_token() + .add(rng, clock, &session, access_token_str, Some(ttl)) + .await?; + + let mut params = AccessTokenResponse::new(access_token.access_token).with_expires_in(ttl); + + // XXX: there is a potential (but unlikely) race here, where the activity for + // the session is recorded before the transaction is committed. We would have to + // save the repository here to fix that. 
+ activity_tracker + .record_oauth2_session(clock, &session) + .await; + + if !session.scope.is_empty() { + // We only return the scope if it's not empty + params = params.with_scope(session.scope); + } + + Ok((params, repo)) +} + +async fn device_code_grant( + rng: &mut BoxRng, + clock: &impl Clock, + activity_tracker: &BoundActivityTracker, + grant: &DeviceCodeGrant, + client: &Client, + key_store: &Keystore, + url_builder: &UrlBuilder, + site_config: &SiteConfig, + mut repo: BoxRepository, + homeserver: &Arc, + user_agent: Option, +) -> Result<(AccessTokenResponse, BoxRepository), RouteError> { + // Check that the client is allowed to use this grant type + if !client.grant_types.contains(&GrantType::DeviceCode) { + return Err(RouteError::UnauthorizedClient(client.id)); + } + + let grant = repo + .oauth2_device_code_grant() + .find_by_device_code(&grant.device_code) + .await? + .ok_or(RouteError::GrantNotFound)?; + + // Check that the client match + if client.id != grant.client_id { + return Err(RouteError::ClientIDMismatch { + expected: grant.client_id, + actual: client.id, + }); + } + + if grant.expires_at < clock.now() { + return Err(RouteError::DeviceCodeExpired); + } + + let browser_session_id = match &grant.state { + DeviceCodeGrantState::Pending => { + return Err(RouteError::DeviceCodePending); + } + DeviceCodeGrantState::Rejected { .. } => { + return Err(RouteError::DeviceCodeRejected); + } + DeviceCodeGrantState::Exchanged { .. } => { + return Err(RouteError::DeviceCodeExchanged); + } + DeviceCodeGrantState::Fulfilled { + browser_session_id, .. + } => *browser_session_id, + }; + + let browser_session = repo + .browser_session() + .lookup(browser_session_id) + .await? 
+ .ok_or(RouteError::NoSuchBrowserSession(browser_session_id))?; + + // Start the session + let mut session = repo + .oauth2_session() + .add_from_browser_session(rng, clock, client, &browser_session, grant.scope.clone()) + .await?; + + repo.oauth2_device_code_grant() + .exchange(clock, grant, &session) + .await?; + + // XXX: should we get the user agent from the device code grant instead? + if let Some(user_agent) = user_agent { + session = repo + .oauth2_session() + .record_user_agent(session, user_agent) + .await?; + } + + let ttl = site_config.access_token_ttl; + let access_token_str = TokenType::AccessToken.generate(rng); + + let access_token = repo + .oauth2_access_token() + .add(rng, clock, &session, access_token_str, Some(ttl)) + .await?; + + let mut params = + AccessTokenResponse::new(access_token.access_token.clone()).with_expires_in(ttl); + + // If the client uses the refresh token grant type, we also generate a refresh + // token + if client.grant_types.contains(&GrantType::RefreshToken) { + let refresh_token_str = TokenType::RefreshToken.generate(rng); + + let refresh_token = repo + .oauth2_refresh_token() + .add(rng, clock, &session, &access_token, refresh_token_str) + .await?; + + params = params.with_refresh_token(refresh_token.refresh_token); + } + + // If the client asked for an ID token, we generate one + if session.scope.contains(&scope::OPENID) { + let id_token = generate_id_token( + rng, + clock, + url_builder, + key_store, + client, + None, + &browser_session, + Some(&access_token), + None, + )?; + + params = params.with_id_token(id_token); + } + + // Lock the user sync to make sure we don't get into a race condition + repo.user() + .acquire_lock_for_sync(&browser_session.user) + .await?; + + // Look for device to provision + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + homeserver + .upsert_device(&browser_session.user.username, device.as_str(), None) + .await + 
.map_err(RouteError::ProvisionDeviceFailed)?; + } + } + + // XXX: there is a potential (but unlikely) race here, where the activity for + // the session is recorded before the transaction is committed. We would have to + // save the repository here to fix that. + activity_tracker + .record_oauth2_session(clock, &session) + .await; + + if !session.scope.is_empty() { + // We only return the scope if it's not empty + params = params.with_scope(session.scope); + } + + Ok((params, repo)) +} + +#[cfg(test)] +mod tests { + use hyper::Request; + use mas_data_model::{AccessToken, AuthorizationCode, RefreshToken}; + use mas_router::SimpleRoute; + use oauth2_types::{ + registration::ClientRegistrationResponse, + requests::{DeviceAuthorizationResponse, ResponseMode}, + scope::{OPENID, Scope}, + }; + use sqlx::PgPool; + + use super::*; + use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_auth_code_grant(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/callback"], + "token_endpoint_auth_method": "none", + "response_types": ["code"], + "grant_types": ["authorization_code"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let ClientRegistrationResponse { client_id, .. } = response.json(); + + // Let's provision a user and create a session for them. This part is hard to + // test with just HTTP requests, so we'll use the repository directly. 
+ let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + // Lookup the client in the database. + let client = repo + .oauth2_client() + .find_by_client_id(&client_id) + .await + .unwrap() + .unwrap(); + + // Start a grant + let code = "thisisaverysecurecode"; + let grant = repo + .oauth2_authorization_grant() + .add( + &mut state.rng(), + &state.clock, + &client, + "https://example.com/redirect".parse().unwrap(), + Scope::from_iter([OPENID]), + Some(AuthorizationCode { + code: code.to_owned(), + pkce: None, + }), + Some("state".to_owned()), + Some("nonce".to_owned()), + ResponseMode::Query, + false, + None, + None, + ) + .await + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + grant.scope.clone(), + ) + .await + .unwrap(); + + // And fulfill it + let grant = repo + .oauth2_authorization_grant() + .fulfill(&state.clock, &session, grant) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now call the token endpoint to get an access token. + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": grant.redirect_uri, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let AccessTokenResponse { access_token, .. 
} = response.json(); + + // Check that the token is valid + assert!(state.is_access_token_valid(&access_token).await); + + // Exchange it again, this it should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": grant.redirect_uri, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let error: ClientError = response.json(); + assert_eq!(error.error, ClientErrorCode::InvalidGrant); + + // The token should still be valid + assert!(state.is_access_token_valid(&access_token).await); + + // Now wait a bit + state.clock.advance(Duration::try_minutes(1).unwrap()); + + // Exchange it again, this it should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": grant.redirect_uri, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let error: ClientError = response.json(); + assert_eq!(error.error, ClientErrorCode::InvalidGrant); + + // And it should have revoked the token we got + assert!(!state.is_access_token_valid(&access_token).await); + + // Try another one and wait for too long before exchanging it + let mut repo = state.repository().await.unwrap(); + let code = "thisisanothercode"; + let grant = repo + .oauth2_authorization_grant() + .add( + &mut state.rng(), + &state.clock, + &client, + "https://example.com/redirect".parse().unwrap(), + Scope::from_iter([OPENID]), + Some(AuthorizationCode { + code: code.to_owned(), + pkce: None, + }), + Some("state".to_owned()), + Some("nonce".to_owned()), + ResponseMode::Query, + false, + None, + None, + ) + .await + .unwrap(); + + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + 
&state.clock, + &client, + &browser_session, + grant.scope.clone(), + ) + .await + .unwrap(); + + // And fulfill it + let grant = repo + .oauth2_authorization_grant() + .fulfill(&state.clock, &session, grant) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now wait a bit + state + .clock + .advance(Duration::microseconds(15 * 60 * 1000 * 1000)); + + // Exchange it, it should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": grant.redirect_uri, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let ClientError { error, .. } = response.json(); + assert_eq!(error, ClientErrorCode::InvalidGrant); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_refresh_token_grant(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/callback"], + "token_endpoint_auth_method": "none", + "response_types": ["code"], + "grant_types": ["authorization_code", "refresh_token"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let ClientRegistrationResponse { client_id, .. } = response.json(); + + // Let's provision a user and create a session for them. This part is hard to + // test with just HTTP requests, so we'll use the repository directly. 
+ let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + // Lookup the client in the database. + let client = repo + .oauth2_client() + .find_by_client_id(&client_id) + .await + .unwrap() + .unwrap(); + + // Get a token pair + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + let (AccessToken { access_token, .. }, RefreshToken { refresh_token, .. }) = + generate_token_pair( + &mut state.rng(), + &state.clock, + &mut repo, + &session, + Duration::microseconds(5 * 60 * 1000 * 1000), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // First check that the token is valid + assert!(state.is_access_token_valid(&access_token).await); + + // Now call the token endpoint to get an access token. 
+ let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let old_access_token = access_token; + let old_refresh_token = refresh_token; + let response: AccessTokenResponse = response.json(); + let access_token = response.access_token; + let refresh_token = response.refresh_token.expect("to have a refresh token"); + + // Check that the new token is valid + assert!(state.is_access_token_valid(&access_token).await); + + // Check that the old token is no longer valid + assert!(!state.is_access_token_valid(&old_access_token).await); + + // Call it again with the old token, it should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": old_refresh_token, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let ClientError { error, .. 
} = response.json(); + assert_eq!(error, ClientErrorCode::InvalidGrant); + + // Call it again with the new token, it should work + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client.client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let _: AccessTokenResponse = response.json(); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_double_refresh(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/callback"], + "token_endpoint_auth_method": "none", + "response_types": ["code"], + "grant_types": ["authorization_code", "refresh_token"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let ClientRegistrationResponse { client_id, .. } = response.json(); + + // Let's provision a user and create a session for them. This part is hard to + // test with just HTTP requests, so we'll use the repository directly. + let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + // Lookup the client in the database. 
+ let client = repo + .oauth2_client() + .find_by_client_id(&client_id) + .await + .unwrap() + .unwrap(); + + // Get a token pair + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut state.rng(), + &state.clock, + &client, + &browser_session, + Scope::from_iter([OPENID]), + ) + .await + .unwrap(); + + let (AccessToken { access_token, .. }, RefreshToken { refresh_token, .. }) = + generate_token_pair( + &mut state.rng(), + &state.clock, + &mut repo, + &session, + Duration::microseconds(5 * 60 * 1000 * 1000), + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // First check that the token is valid + assert!(state.is_access_token_valid(&access_token).await); + + // Now call the token endpoint to get an access token. + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client.client_id, + })); + + let first_response = state.request(request).await; + first_response.assert_status(StatusCode::OK); + let first_response: AccessTokenResponse = first_response.json(); + + // Call a second time, it should work, as we haven't done anything yet with the + // token + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client.client_id, + })); + + let second_response = state.request(request).await; + second_response.assert_status(StatusCode::OK); + let second_response: AccessTokenResponse = second_response.json(); + + // Check that we got new tokens + assert_ne!(first_response.access_token, second_response.access_token); + assert_ne!(first_response.refresh_token, second_response.refresh_token); + + // Check that the old-new token is invalid + assert!( + !state + .is_access_token_valid(&first_response.access_token) + .await + ); + + // Check that the new-new token is valid + assert!( + state + 
.is_access_token_valid(&second_response.access_token) + .await + ); + + // Do a third refresh, this one should not work, as we've used the new + // access token + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client.client_id, + })); + + let third_response = state.request(request).await; + third_response.assert_status(StatusCode::BAD_REQUEST); + + // The other reason we consider a new refresh token to be 'used' is if + // it was already used in a refresh + // So, if we do a refresh with the second_response.refresh_token, then + // another refresh with the result, redoing one with + // second_response.refresh_token again should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": second_response.refresh_token, + "client_id": client.client_id, + })); + + // This one is fine + let fourth_response = state.request(request).await; + fourth_response.assert_status(StatusCode::OK); + let fourth_response: AccessTokenResponse = fourth_response.json(); + + // Do another one, it should be fine as well + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": fourth_response.refresh_token, + "client_id": client.client_id, + })); + + let fifth_response = state.request(request).await; + fifth_response.assert_status(StatusCode::OK); + let fifth_response: AccessTokenResponse = fifth_response.json(); + + // But now, if we re-do with the second_response.refresh_token, it should + // fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": second_response.refresh_token, + "client_id": client.client_id, + })); + + let sixth_response = state.request(request).await; + 
sixth_response.assert_status(StatusCode::BAD_REQUEST); + + // One edge-case scenario: after 30 days, expired access tokens are + // deleted, so we can't track accurately if the refresh successful or + // not. In this case we chose to allow the refresh to succeed to avoid + // spuriously logging out the user. + + // Make sure to mark the fifth access token as used + assert!( + state + .is_access_token_valid(&fifth_response.access_token) + .await + ); + + // Make sure to run all the cleanup tasks + // We run the job queue once before advancing the clock to make sure the + // scheduled jobs get scheduled to a time before we advanced the clock + state.run_jobs_in_queue().await; + state.clock.advance(Duration::days(31)); + state.run_jobs_in_queue().await; + + // We're not supposed to be able to use the fourth refresh token, but here we + // are + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": fourth_response.refresh_token, + "client_id": client.client_id, + })); + + let seventh_response = state.request(request).await; + seventh_response.assert_status(StatusCode::OK); + + // And the refresh token we had on the fifth response should now be invalid + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "refresh_token", + "refresh_token": fifth_response.refresh_token, + "client_id": client.client_id, + })); + + let eighth_response = state.request(request).await; + eighth_response.assert_status(StatusCode::BAD_REQUEST); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_client_credentials(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "client_secret_post", + 
"grant_types": ["client_credentials"], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + let client_secret = response.client_secret.expect("to have a client secret"); + + // Call the token endpoint with an empty scope + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let response: AccessTokenResponse = response.json(); + assert!(response.refresh_token.is_none()); + assert!(response.expires_in.is_some()); + assert!(response.scope.is_none()); + + // Revoke the token + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": response.access_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + // We should be allowed to ask for the GraphQL API scope + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:graphql:*" + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let response: AccessTokenResponse = response.json(); + assert!(response.refresh_token.is_none()); + assert!(response.expires_in.is_some()); + assert_eq!(response.scope, Some("urn:mas:graphql:*".parse().unwrap())); + + // Revoke the token + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": response.access_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = 
state.request(request).await; + response.assert_status(StatusCode::OK); + + // We should be NOT allowed to ask for the MAS admin scope + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:admin" + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let ClientError { error, .. } = response.json(); + assert_eq!(error, ClientErrorCode::InvalidScope); + + // Now, if we add the client to the admin list in the policy, it should work + let state = { + let mut state = state; + state.policy_factory = crate::test_utils::policy_factory( + "example.com", + serde_json::json!({ + "admin_clients": [client_id] + }), + ) + .await + .unwrap(); + state + }; + + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": "urn:mas:admin" + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let response: AccessTokenResponse = response.json(); + assert!(response.refresh_token.is_none()); + assert!(response.expires_in.is_some()); + assert_eq!(response.scope, Some("urn:mas:admin".parse().unwrap())); + + // Revoke the token + let request = Request::post(mas_router::OAuth2Revocation::PATH).form(serde_json::json!({ + "token": response.access_token, + "client_id": client_id, + "client_secret": client_secret, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_device_code_grant(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + 
Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "none", + "grant_types": ["urn:ietf:params:oauth:grant-type:device_code", "refresh_token"], + "response_types": [], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + + // Start a device code grant + let request = Request::post(mas_router::OAuth2DeviceAuthorizationEndpoint::PATH).form( + serde_json::json!({ + "client_id": client_id, + "scope": "openid", + }), + ); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let device_grant: DeviceAuthorizationResponse = response.json(); + + // Poll the token endpoint, it should be pending + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": device_grant.device_code, + "client_id": client_id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let ClientError { error, .. } = response.json(); + assert_eq!(error, ClientErrorCode::AuthorizationPending); + + // Let's provision a user and create a browser session for them. This part is + // hard to test with just HTTP requests, so we'll use the repository + // directly. 
+ let mut repo = state.repository().await.unwrap(); + + let user = repo + .user() + .add(&mut state.rng(), &state.clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut state.rng(), &state.clock, &user, None) + .await + .unwrap(); + + // Find the grant + let grant = repo + .oauth2_device_code_grant() + .find_by_user_code(&device_grant.user_code) + .await + .unwrap() + .unwrap(); + + // And fulfill it + let grant = repo + .oauth2_device_code_grant() + .fulfill(&state.clock, grant, &browser_session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Now call the token endpoint to get an access token. + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": grant.device_code, + "client_id": client_id, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let response: AccessTokenResponse = response.json(); + + // Check that the token is valid + assert!(state.is_access_token_valid(&response.access_token).await); + // We advertised the refresh token grant type, so we should have a refresh token + assert!(response.refresh_token.is_some()); + // We asked for the openid scope, so we should have an ID token + assert!(response.id_token.is_some()); + + // Calling it again should fail + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": grant.device_code, + "client_id": client_id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + + let ClientError { error, .. 
} = response.json(); + assert_eq!(error, ClientErrorCode::InvalidGrant); + + // Do another grant and make it expire + let request = Request::post(mas_router::OAuth2DeviceAuthorizationEndpoint::PATH).form( + serde_json::json!({ + "client_id": client_id, + "scope": "openid", + }), + ); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let device_grant: DeviceAuthorizationResponse = response.json(); + + // Poll the token endpoint, it should be pending + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": device_grant.device_code, + "client_id": client_id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let ClientError { error, .. } = response.json(); + assert_eq!(error, ClientErrorCode::AuthorizationPending); + + state.clock.advance(Duration::try_hours(1).unwrap()); + + // Poll again, it should be expired + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": device_grant.device_code, + "client_id": client_id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let ClientError { error, .. 
} = response.json(); + assert_eq!(error, ClientErrorCode::ExpiredToken); + + // Do another grant and reject it + let request = Request::post(mas_router::OAuth2DeviceAuthorizationEndpoint::PATH).form( + serde_json::json!({ + "client_id": client_id, + "scope": "openid", + }), + ); + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + + let device_grant: DeviceAuthorizationResponse = response.json(); + + // Find the grant and reject it + let mut repo = state.repository().await.unwrap(); + + // Find the grant + let grant = repo + .oauth2_device_code_grant() + .find_by_user_code(&device_grant.user_code) + .await + .unwrap() + .unwrap(); + + // And reject it + let grant = repo + .oauth2_device_code_grant() + .reject(&state.clock, grant, &browser_session) + .await + .unwrap(); + + repo.save().await.unwrap(); + + // Poll the token endpoint, it should be rejected + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + "device_code": grant.device_code, + "client_id": client_id, + })); + let response = state.request(request).await; + response.assert_status(StatusCode::FORBIDDEN); + + let ClientError { error, .. 
} = response.json(); + assert_eq!(error, ClientErrorCode::AccessDenied); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_unsupported_grant(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "redirect_uris": ["https://example.com/callback"], + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["password"], + "response_types": [], + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::CREATED); + + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + let client_secret = response.client_secret.expect("to have a client secret"); + + // Call the token endpoint with an unsupported grant type + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "password", + "client_id": client_id, + "client_secret": client_secret, + "username": "john", + "password": "hunter2", + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::BAD_REQUEST); + let ClientError { error, .. } = response.json(); + assert_eq!(error, ClientErrorCode::UnsupportedGrantType); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/userinfo.rs b/matrix-authentication-service/crates/handlers/src/oauth2/userinfo.rs new file mode 100644 index 00000000..c939ba9c --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/userinfo.rs @@ -0,0 +1,165 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
use axum::{
    Json,
    extract::State,
    response::{IntoResponse, Response},
};
use hyper::StatusCode;
use mas_axum_utils::{
    jwt::JwtResponse,
    record_error,
    user_authorization::{AuthorizationVerificationError, UserAuthorization},
};
use mas_data_model::{BoxClock, BoxRng};
use mas_jose::{
    constraints::Constrainable,
    jwt::{JsonWebSignatureHeader, Jwt},
};
use mas_keystore::Keystore;
use mas_router::UrlBuilder;
use mas_storage::{BoxRepository, oauth2::OAuth2ClientRepository};
use serde::Serialize;
use serde_with::skip_serializing_none;
use thiserror::Error;
use ulid::Ulid;

use crate::{BoundActivityTracker, impl_from_error_for_route};

/// Claims returned as plain JSON from the userinfo endpoint.
#[skip_serializing_none]
#[derive(Serialize)]
struct UserInfo {
    sub: String,
    username: String,
}

/// Wrapper around [`UserInfo`] used when the client asked for a signed
/// (JWT) userinfo response: adds the issuer and audience claims.
#[derive(Serialize)]
struct SignedUserInfo {
    iss: String,
    aud: String,
    #[serde(flatten)]
    user_info: UserInfo,
}

#[derive(Debug, Error)]
pub enum RouteError {
    #[error(transparent)]
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),

    #[error("failed to authenticate")]
    AuthorizationVerificationError(#[from] AuthorizationVerificationError),

    #[error("session is not allowed to access the userinfo endpoint")]
    Unauthorized,

    #[error("no suitable key found for signing")]
    InvalidSigningKey,

    #[error("failed to load client {0}")]
    NoSuchClient(Ulid),

    #[error("failed to load user {0}")]
    NoSuchUser(Ulid),
}

impl_from_error_for_route!(mas_storage::RepositoryError);
impl_from_error_for_route!(mas_keystore::WrongAlgorithmError);
impl_from_error_for_route!(mas_jose::jwt::JwtSignatureError);

impl IntoResponse for RouteError {
    fn into_response(self) -> axum::response::Response {
        // Only server-side failures get reported to Sentry; auth failures are
        // expected client behaviour.
        let sentry_event_id = record_error!(
            self,
            Self::Internal(_)
                | Self::InvalidSigningKey
                | Self::NoSuchClient(_)
                | Self::NoSuchUser(_)
        );
        let response = match self {
            Self::Internal(_)
            | Self::InvalidSigningKey
            | Self::NoSuchClient(_)
            | Self::NoSuchUser(_) => {
                (StatusCode::INTERNAL_SERVER_ERROR, self.to_string()).into_response()
            }
            Self::AuthorizationVerificationError(_) | Self::Unauthorized => {
                StatusCode::UNAUTHORIZED.into_response()
            }
        };

        (sentry_event_id, response).into_response()
    }
}

#[tracing::instrument(name = "handlers.oauth2.userinfo.get", skip_all)]
pub async fn get(
    mut rng: BoxRng,
    clock: BoxClock,
    State(url_builder): State<UrlBuilder>,
    activity_tracker: BoundActivityTracker,
    mut repo: BoxRepository,
    State(key_store): State<Keystore>,
    user_authorization: UserAuthorization,
) -> Result<Response, RouteError> {
    let session = user_authorization.protected(&mut repo, &clock).await?;

    // This endpoint requires the `openid` scope.
    if !session.scope.contains("openid") {
        return Err(RouteError::Unauthorized);
    }

    // Fail if the session is not associated with a user.
    let Some(user_id) = session.user_id else {
        return Err(RouteError::Unauthorized);
    };

    activity_tracker
        .record_oauth2_session(&clock, &session)
        .await;

    let user = repo
        .user()
        .lookup(user_id)
        .await?
        .ok_or(RouteError::NoSuchUser(user_id))?;

    let user_info = UserInfo {
        sub: user.sub.clone(),
        username: user.username.clone(),
    };

    let client = repo
        .oauth2_client()
        .lookup(session.client_id)
        .await?
+ .ok_or(RouteError::NoSuchClient(session.client_id))?; + + repo.save().await?; + + if let Some(alg) = client.userinfo_signed_response_alg { + let key = key_store + .signing_key_for_algorithm(&alg) + .ok_or(RouteError::InvalidSigningKey)?; + + let signer = key.params().signing_key_for_alg(&alg)?; + let header = JsonWebSignatureHeader::new(alg) + .with_kid(key.kid().ok_or(RouteError::InvalidSigningKey)?); + + let user_info = SignedUserInfo { + iss: url_builder.oidc_issuer().to_string(), + aud: client.client_id, + user_info, + }; + + let token = Jwt::sign_with_rng(&mut rng, header, user_info, &signer)?; + Ok(JwtResponse(token).into_response()) + } else { + Ok(Json(user_info).into_response()) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/oauth2/webfinger.rs b/matrix-authentication-service/crates/handlers/src/oauth2/webfinger.rs new file mode 100644 index 00000000..489a8e9e --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/oauth2/webfinger.rs @@ -0,0 +1,47 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{Json, extract::State, response::IntoResponse}; +use axum_extra::{extract::Query, typed_header::TypedHeader}; +use headers::ContentType; +use mas_router::UrlBuilder; +use oauth2_types::webfinger::WebFingerResponse; +use serde::Deserialize; + +#[derive(Deserialize)] +pub(crate) struct Params { + resource: String, + + // TODO: handle multiple rel= + #[serde(default)] + rel: Option, +} + +fn jrd() -> mime::Mime { + "application/jrd+json".parse().unwrap() +} + +#[tracing::instrument(name = "handlers.oauth2.webfinger.get", skip_all)] +pub(crate) async fn get( + Query(params): Query, + State(url_builder): State, +) -> impl IntoResponse { + // TODO: should we validate the subject? 
+ let subject = params.resource; + + let wants_issuer = params + .rel + .iter() + .any(|i| i == "http://openid.net/specs/connect/1.0/issuer"); + + let res = if wants_issuer { + WebFingerResponse::new(subject).with_issuer(url_builder.oidc_issuer()) + } else { + WebFingerResponse::new(subject) + }; + + (TypedHeader(ContentType::from(jrd())), Json(res)) +} diff --git a/matrix-authentication-service/crates/handlers/src/passwords.rs b/matrix-authentication-service/crates/handlers/src/passwords.rs new file mode 100644 index 00000000..6071cf73 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/passwords.rs @@ -0,0 +1,815 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, sync::Arc}; + +use anyhow::Context; +use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier, password_hash::SaltString}; +use futures_util::future::OptionFuture; +use pbkdf2::{Pbkdf2, password_hash}; +use rand::{CryptoRng, RngCore, SeedableRng, distributions::Standard, prelude::Distribution}; +use thiserror::Error; +use zeroize::Zeroizing; +use zxcvbn::zxcvbn; + +pub type SchemeVersion = u16; + +/// The result of a password verification, which is `true` if the password +/// matches the hashed password, and `false` otherwise. +/// +/// In the success case it can also contain additional data, such as the new +/// hashing scheme and the new hashed password. 
+#[must_use] +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum PasswordVerificationResult { + /// The password matches the stored password hash + Success(T), + /// The password does not match the stored password hash + Failure, +} + +impl PasswordVerificationResult<()> { + fn success() -> Self { + Self::Success(()) + } + + fn failure() -> Self { + Self::Failure + } +} + +impl PasswordVerificationResult { + /// Converts the result into a new result with the given data. + fn with_data(self, data: N) -> PasswordVerificationResult { + match self { + Self::Success(_) => PasswordVerificationResult::Success(data), + Self::Failure => PasswordVerificationResult::Failure, + } + } + + #[must_use] + pub fn is_success(&self) -> bool { + matches!(self, Self::Success(_)) + } +} + +impl From for PasswordVerificationResult<()> { + fn from(value: bool) -> Self { + if value { + Self::success() + } else { + Self::failure() + } + } +} + +#[derive(Debug, Error)] +#[error("Password manager is disabled")] +pub struct PasswordManagerDisabledError; + +#[derive(Clone)] +pub struct PasswordManager { + inner: Option>, +} + +struct InnerPasswordManager { + /// Minimum complexity score of new passwords (between 0 and 4) as evaluated + /// by zxcvbn. + minimum_complexity: u8, + current_hasher: Hasher, + current_version: SchemeVersion, + + /// A map of "old" hashers used only for verification + other_hashers: HashMap, +} + +impl PasswordManager { + /// Creates a new [`PasswordManager`] from an iterator and a minimum allowed + /// complexity score between 0 and 4. The first item in + /// the iterator will be the default hashing scheme. 
+ /// + /// # Errors + /// + /// Returns an error if the iterator was empty + pub fn new>( + minimum_complexity: u8, + iter: I, + ) -> Result { + let mut iter = iter.into_iter(); + + // Take the first hasher as the current hasher + let (current_version, current_hasher) = iter + .next() + .context("Iterator must have at least one item")?; + + // Collect the other hashers in a map used only in verification + let other_hashers = iter.collect(); + + Ok(Self { + inner: Some(Arc::new(InnerPasswordManager { + minimum_complexity, + current_hasher, + current_version, + other_hashers, + })), + }) + } + + /// Creates a new disabled password manager + #[must_use] + pub const fn disabled() -> Self { + Self { inner: None } + } + + /// Checks if the password manager is enabled or not + #[must_use] + pub const fn is_enabled(&self) -> bool { + self.inner.is_some() + } + + /// Get the inner password manager + /// + /// # Errors + /// + /// Returns an error if the password manager is disabled + fn get_inner(&self) -> Result, PasswordManagerDisabledError> { + self.inner.clone().ok_or(PasswordManagerDisabledError) + } + + /// Returns true if and only if the given password satisfies the minimum + /// complexity requirements. + /// + /// # Errors + /// + /// Returns an error if the password manager is disabled + pub fn is_password_complex_enough( + &self, + password: &str, + ) -> Result { + let inner = self.get_inner()?; + let score = zxcvbn(password, &[]); + Ok(u8::from(score.score()) >= inner.minimum_complexity) + } + + /// Hash a password with the default hashing scheme. + /// Returns the version of the hashing scheme used and the hashed password. 
+ /// + /// # Errors + /// + /// Returns an error if the hashing failed or if the password manager is + /// disabled + #[tracing::instrument(name = "passwords.hash", skip_all)] + pub async fn hash( + &self, + rng: R, + password: Zeroizing, + ) -> Result<(SchemeVersion, String), anyhow::Error> { + let inner = self.get_inner()?; + + // Seed a future-local RNG so the RNG passed in parameters doesn't have to be + // 'static + let rng = rand_chacha::ChaChaRng::from_rng(rng)?; + let span = tracing::Span::current(); + + // `inner` is being moved in the blocking task, so we need to copy the version + // first + let version = inner.current_version; + + let hashed = tokio::task::spawn_blocking(move || { + span.in_scope(move || inner.current_hasher.hash_blocking(rng, password)) + }) + .await??; + + Ok((version, hashed)) + } + + /// Verify a password hash for the given hashing scheme. + /// + /// # Errors + /// + /// Returns an error if the password hash verification failed or if the + /// password manager is disabled + #[tracing::instrument(name = "passwords.verify", skip_all, fields(%scheme))] + pub async fn verify( + &self, + scheme: SchemeVersion, + password: Zeroizing, + hashed_password: String, + ) -> Result { + let inner = self.get_inner()?; + let span = tracing::Span::current(); + + let result = tokio::task::spawn_blocking(move || { + span.in_scope(move || { + let hasher = if scheme == inner.current_version { + &inner.current_hasher + } else { + inner + .other_hashers + .get(&scheme) + .context("Hashing scheme not found")? 
+ }; + + hasher.verify_blocking(&hashed_password, password) + }) + }) + .await??; + + Ok(result) + } + + /// Verify a password hash for the given hashing scheme, and upgrade it on + /// the fly, if it was not hashed with the default scheme + /// + /// # Errors + /// + /// Returns an error if the password hash verification failed or if the + /// password manager is disabled + #[tracing::instrument(name = "passwords.verify_and_upgrade", skip_all, fields(%scheme))] + pub async fn verify_and_upgrade( + &self, + rng: R, + scheme: SchemeVersion, + password: Zeroizing, + hashed_password: String, + ) -> Result>, anyhow::Error> { + let inner = self.get_inner()?; + + // If the current scheme isn't the default one, we also hash with the default + // one so that + let new_hash_fut: OptionFuture<_> = (scheme != inner.current_version) + .then(|| self.hash(rng, password.clone())) + .into(); + + let verify_fut = self.verify(scheme, password, hashed_password); + + let (new_hash_res, verify_res) = tokio::join!(new_hash_fut, verify_fut); + let password_result = verify_res?; + + let new_hash = new_hash_res.transpose()?; + + Ok(password_result.with_data(new_hash)) + } +} + +/// A hashing scheme, with an optional pepper +pub struct Hasher { + algorithm: Algorithm, + unicode_normalization: bool, + pepper: Option>, +} + +impl Hasher { + /// Creates a new hashing scheme based on the bcrypt algorithm + #[must_use] + pub const fn bcrypt( + cost: Option, + pepper: Option>, + unicode_normalization: bool, + ) -> Self { + let algorithm = Algorithm::Bcrypt { cost }; + Self { + algorithm, + unicode_normalization, + pepper, + } + } + + /// Creates a new hashing scheme based on the argon2id algorithm + #[must_use] + pub const fn argon2id(pepper: Option>, unicode_normalization: bool) -> Self { + let algorithm = Algorithm::Argon2id; + Self { + algorithm, + unicode_normalization, + pepper, + } + } + + /// Creates a new hashing scheme based on the pbkdf2 algorithm + #[must_use] + pub const fn 
pbkdf2(pepper: Option<Vec<u8>>, unicode_normalization: bool) -> Self {
        let algorithm = Algorithm::Pbkdf2;
        Self {
            algorithm,
            unicode_normalization,
            pepper,
        }
    }

    fn normalize_password(&self, password: Zeroizing<String>) -> Zeroizing<String> {
        if self.unicode_normalization {
            // This is the normalization method used by Synapse
            let normalizer = icu_normalizer::ComposingNormalizer::new_nfkc();
            Zeroizing::new(normalizer.normalize(&password))
        } else {
            password
        }
    }

    fn hash_blocking<R: CryptoRng + RngCore>(
        &self,
        rng: R,
        password: Zeroizing<String>,
    ) -> Result<String, anyhow::Error> {
        let password = self.normalize_password(password);

        self.algorithm
            .hash_blocking(rng, password.as_bytes(), self.pepper.as_deref())
    }

    fn verify_blocking(
        &self,
        hashed_password: &str,
        password: Zeroizing<String>,
    ) -> Result<PasswordVerificationResult<()>, anyhow::Error> {
        let password = self.normalize_password(password);

        self.algorithm
            .verify_blocking(hashed_password, password.as_bytes(), self.pepper.as_deref())
    }
}

#[derive(Debug, Clone, Copy)]
enum Algorithm {
    Bcrypt { cost: Option<u32> },
    Argon2id,
    Pbkdf2,
}

impl Algorithm {
    fn hash_blocking<R: CryptoRng + RngCore>(
        self,
        mut rng: R,
        password: &[u8],
        pepper: Option<&[u8]>,
    ) -> Result<String, anyhow::Error> {
        match self {
            Self::Bcrypt { cost } => {
                // bcrypt has no built-in pepper support: append it to the password
                let mut password = Zeroizing::new(password.to_vec());
                if let Some(pepper) = pepper {
                    password.extend_from_slice(pepper);
                }

                let salt = Standard.sample(&mut rng);

                let hashed = bcrypt::hash_with_salt(password, cost.unwrap_or(12), salt)?;
                Ok(hashed.format_for_version(bcrypt::Version::TwoB))
            }

            Self::Argon2id => {
                let algorithm = argon2::Algorithm::default();
                let version = argon2::Version::default();
                let params = argon2::Params::default();

                // argon2 supports a keyed (peppered) mode natively
                let phf = if let Some(secret) = pepper {
                    Argon2::new_with_secret(secret, algorithm, version, params)?
                } else {
                    Argon2::new(algorithm, version, params)
                };

                let salt = SaltString::generate(rng);
                let hashed = phf.hash_password(password.as_ref(), &salt)?;
                Ok(hashed.to_string())
            }

            Self::Pbkdf2 => {
                let mut password = Zeroizing::new(password.to_vec());
                if let Some(pepper) = pepper {
                    password.extend_from_slice(pepper);
                }

                let salt = SaltString::generate(rng);
                let hashed = Pbkdf2.hash_password(password.as_ref(), &salt)?;
                Ok(hashed.to_string())
            }
        }
    }

    fn verify_blocking(
        self,
        hashed_password: &str,
        password: &[u8],
        pepper: Option<&[u8]>,
    ) -> Result<PasswordVerificationResult<()>, anyhow::Error> {
        let result = match self {
            Algorithm::Bcrypt { .. } => {
                let mut password = Zeroizing::new(password.to_vec());
                if let Some(pepper) = pepper {
                    password.extend_from_slice(pepper);
                }

                let result = bcrypt::verify(password, hashed_password)?;
                PasswordVerificationResult::from(result)
            }

            Algorithm::Argon2id => {
                let algorithm = argon2::Algorithm::default();
                let version = argon2::Version::default();
                let params = argon2::Params::default();

                let phf = if let Some(secret) = pepper {
                    Argon2::new_with_secret(secret, algorithm, version, params)?
                } else {
                    Argon2::new(algorithm, version, params)
                };

                let hashed_password = PasswordHash::new(hashed_password)?;

                // A password mismatch is a normal `Failure`, any other error is
                // propagated
                match phf.verify_password(password.as_ref(), &hashed_password) {
                    Ok(()) => PasswordVerificationResult::success(),
                    Err(password_hash::Error::Password) => PasswordVerificationResult::failure(),
                    Err(e) => Err(e)?,
                }
            }

            Algorithm::Pbkdf2 => {
                let mut password = Zeroizing::new(password.to_vec());
                if let Some(pepper) = pepper {
                    password.extend_from_slice(pepper);
                }

                let hashed_password = PasswordHash::new(hashed_password)?;

                match Pbkdf2.verify_password(password.as_ref(), &hashed_password) {
                    Ok(()) => PasswordVerificationResult::success(),
                    Err(password_hash::Error::Password) => PasswordVerificationResult::failure(),
                    Err(e) => Err(e)?,
                }
            }
        };

        Ok(result)
    }
}

#[cfg(test)]
mod tests {
    use rand::SeedableRng;

    use super::*;

    #[test]
    fn hashing_bcrypt() {
        let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42);
        let password = b"hunter2";
        let password2 = b"wrong-password";
        let pepper = b"a-secret-pepper";
        let pepper2 = b"the-wrong-pepper";

        let alg = Algorithm::Bcrypt { cost: Some(10) };
        // Hash with a pepper
        let hash = alg
            .hash_blocking(&mut rng, password, Some(pepper))
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper2))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );

        // Hash without pepper
        let hash = alg
            .hash_blocking(&mut rng,
password, None)
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
    }

    #[test]
    fn hashing_argon2id() {
        let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42);
        let password = b"hunter2";
        let password2 = b"wrong-password";
        let pepper = b"a-secret-pepper";
        let pepper2 = b"the-wrong-pepper";

        let alg = Algorithm::Argon2id;
        // Hash with a pepper
        let hash = alg
            .hash_blocking(&mut rng, password, Some(pepper))
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper2))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );

        // Hash without pepper
        let hash = alg
            .hash_blocking(&mut rng, password, None)
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
    }

    #[test]
    #[ignore = "this is particularly slow (20s+ seconds)"]
    fn hashing_pbkdf2() {
        let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42);
        let password = b"hunter2";
        let password2 = b"wrong-password";
        let pepper = b"a-secret-pepper";
        let pepper2 = b"the-wrong-pepper";

        let alg = Algorithm::Pbkdf2;
        // Hash with a pepper
        let hash = alg
            .hash_blocking(&mut rng, password, Some(pepper))
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper2))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );

        // Hash without pepper
        let hash = alg
            .hash_blocking(&mut rng, password, None)
            .expect("Couldn't hash password");
        insta::assert_snapshot!(hash);

        assert_eq!(
            alg.verify_blocking(&hash, password, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Success(())
        );
        assert_eq!(
            alg.verify_blocking(&hash, password2, None)
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
        assert_eq!(
            alg.verify_blocking(&hash, password, Some(pepper))
                .expect("Verification failed"),
            PasswordVerificationResult::Failure
        );
    }

    #[tokio::test]
    async fn hash_verify_and_upgrade() {
        // Tests the whole password manager, by hashing a password and upgrading it
        // after changing the hashing schemes. The salt generation is done with a seeded
        // RNG, so that we can do stable snapshots of hashed passwords
        let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42);
        let password = Zeroizing::new("hunter2".to_owned());
        let wrong_password = Zeroizing::new("wrong-password".to_owned());

        let manager = PasswordManager::new(
            0,
            [
                // Start with one hashing scheme: the one used by synapse, bcrypt + pepper
                (
                    1,
                    Hasher::bcrypt(Some(10), Some(b"a-secret-pepper".to_vec()), false),
                ),
            ],
        )
        .unwrap();

        let (version, hash) = manager
            .hash(&mut rng, password.clone())
            .await
            .expect("Failed to hash");

        assert_eq!(version, 1);
        insta::assert_snapshot!(hash);

        // Just verifying works
        let res = manager
            .verify(version, password.clone(), hash.clone())
            .await
            .expect("Failed to verify");
        assert_eq!(res, PasswordVerificationResult::Success(()));

        // And doesn't work with the wrong password
        let res = manager
            .verify(version, wrong_password.clone(), hash.clone())
            .await
            .expect("Failed to verify");
        assert_eq!(res, PasswordVerificationResult::Failure);

        // Verifying with the wrong version doesn't work
        manager
            .verify(2, password.clone(), hash.clone())
            .await
            .expect_err("Verification should have failed");

        // Upgrading does nothing
        let res = manager
            .verify_and_upgrade(&mut rng, version, password.clone(), hash.clone())
            .await
            .expect("Failed to verify");

        assert_eq!(res, PasswordVerificationResult::Success(None));

        // Upgrading still verifies that the password matches
        let res = manager
            .verify_and_upgrade(&mut rng, version, wrong_password.clone(), hash.clone())
            .await
            .expect("Failed to verify");
        assert_eq!(res, PasswordVerificationResult::Failure);

        let manager = PasswordManager::new(
            0,
            [
                (2, Hasher::argon2id(None, false)),
                (
                    1,
                    Hasher::bcrypt(Some(10), Some(b"a-secret-pepper".to_vec()), false),
                ),
            ],
        )
        .unwrap();

        // Verifying still works
        let res = manager
.verify(version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Success(())); + + // And doesn't work with the wrong password + let res = manager + .verify(version, wrong_password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Failure); + + // Upgrading does re-hash + let res = manager + .verify_and_upgrade(&mut rng, version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + + let PasswordVerificationResult::Success(Some((version, hash))) = res else { + panic!("Expected a successful upgrade"); + }; + assert_eq!(version, 2); + insta::assert_snapshot!(hash); + + // Upgrading works with the new hash, but does not upgrade + let res = manager + .verify_and_upgrade(&mut rng, version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + + assert_eq!(res, PasswordVerificationResult::Success(None)); + + // Upgrading still verify that the password matches + let res = manager + .verify_and_upgrade(&mut rng, version, wrong_password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Failure); + + // Upgrading still verify that the password matches + let res = manager + .verify_and_upgrade(&mut rng, version, wrong_password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Failure); + + let manager = PasswordManager::new( + 0, + [ + ( + 3, + Hasher::argon2id(Some(b"a-secret-pepper".to_vec()), false), + ), + (2, Hasher::argon2id(None, false)), + ( + 1, + Hasher::bcrypt(Some(10), Some(b"a-secret-pepper".to_vec()), false), + ), + ], + ) + .unwrap(); + + // Verifying still works + let res = manager + .verify(version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Success(())); + + // And doesn't work with the wrong password 
+ let res = manager + .verify(version, wrong_password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Failure); + + // Upgrading does re-hash + let res = manager + .verify_and_upgrade(&mut rng, version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + + let PasswordVerificationResult::Success(Some((version, hash))) = res else { + panic!("Expected a successful upgrade"); + }; + + assert_eq!(version, 3); + insta::assert_snapshot!(hash); + + // Upgrading works with the new hash, but does not upgrade + let res = manager + .verify_and_upgrade(&mut rng, version, password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + + assert_eq!(res, PasswordVerificationResult::Success(None)); + + // Upgrading still verify that the password matches + let res = manager + .verify_and_upgrade(&mut rng, version, wrong_password.clone(), hash.clone()) + .await + .expect("Failed to verify"); + assert_eq!(res, PasswordVerificationResult::Failure); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/preferred_language.rs b/matrix-authentication-service/crates/handlers/src/preferred_language.rs new file mode 100644 index 00000000..8ea38c99 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/preferred_language.rs @@ -0,0 +1,50 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{convert::Infallible, sync::Arc}; + +use axum::{ + extract::{FromRef, FromRequestParts}, + http::request::Parts, +}; +use headers::HeaderMapExt as _; +use mas_axum_utils::language_detection::AcceptLanguage; +use mas_i18n::{DataLocale, Translator, locale}; + +pub struct PreferredLanguage(pub DataLocale); + +impl FromRequestParts for PreferredLanguage +where + S: Send + Sync, + Arc: FromRef, +{ + type Rejection = Infallible; + + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let translator: Arc = FromRef::from_ref(state); + let accept_language = parts.headers.typed_get::(); + + let iter = accept_language + .iter() + .flat_map(AcceptLanguage::iter) + .flat_map(|lang| { + let lang = DataLocale::from(lang); + // XXX: this is hacky as we may want to actually maintain proper language + // aliases at some point, but `zh-CN` doesn't fallback + // automatically to `zh-Hans`, so we insert it manually here. + // For some reason, `zh-TW` does fallback to `zh-Hant` correctly. + if lang == locale!("zh-CN").into() { + vec![lang, locale!("zh-Hans").into()] + } else { + vec![lang] + } + }); + + let locale = translator.choose_locale(iter); + + Ok(PreferredLanguage(locale)) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/rate_limit.rs b/matrix-authentication-service/crates/handlers/src/rate_limit.rs new file mode 100644 index 00000000..0471e635 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/rate_limit.rs @@ -0,0 +1,376 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{net::IpAddr, sync::Arc, time::Duration}; + +use governor::{RateLimiter, clock::QuantaClock, state::keyed::DashMapStateStore}; +use mas_config::RateLimitingConfig; +use mas_data_model::{User, UserEmailAuthentication}; +use ulid::Ulid; + +#[derive(Debug, Clone, thiserror::Error)] +pub enum AccountRecoveryLimitedError { + #[error("Too many account recovery requests for requester {0}")] + Requester(RequesterFingerprint), + + #[error("Too many account recovery requests for e-mail {0}")] + Email(String), +} + +#[derive(Debug, Clone, Copy, thiserror::Error)] +pub enum PasswordCheckLimitedError { + #[error("Too many password checks for requester {0}")] + Requester(RequesterFingerprint), + + #[error("Too many password checks for user {0}")] + User(Ulid), +} + +#[derive(Debug, Clone, thiserror::Error)] +pub enum RegistrationLimitedError { + #[error("Too many account registration requests for requester {0}")] + Requester(RequesterFingerprint), +} + +#[derive(Debug, Clone, thiserror::Error)] +pub enum EmailAuthenticationLimitedError { + #[error("Too many email authentication requests for requester {0}")] + Requester(RequesterFingerprint), + + #[error("Too many email authentication requests for authentication session {0}")] + Authentication(Ulid), + + #[error("Too many email authentication requests for email {0}")] + Email(String), +} + +/// Key used to rate limit requests per requester +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RequesterFingerprint { + ip: Option, +} + +impl std::fmt::Display for RequesterFingerprint { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(ip) = self.ip { + write!(f, "{ip}") + } else { + write!(f, "(NO CLIENT IP)") + } + } +} + +impl RequesterFingerprint { + /// An anonymous key with no IP address set. This should not be used in + /// production, and we should warn users if we can't find their client IPs. 
+ pub const EMPTY: Self = Self { ip: None }; + + /// Create a new anonymous key with the given IP address + #[must_use] + pub const fn new(ip: IpAddr) -> Self { + Self { ip: Some(ip) } + } +} + +/// Rate limiters for the different operations +#[derive(Debug, Clone)] +pub struct Limiter { + inner: Arc, +} + +type KeyedRateLimiter = RateLimiter, QuantaClock>; + +#[derive(Debug)] +struct LimiterInner { + account_recovery_per_requester: KeyedRateLimiter, + account_recovery_per_email: KeyedRateLimiter, + password_check_for_requester: KeyedRateLimiter, + password_check_for_user: KeyedRateLimiter, + registration_per_requester: KeyedRateLimiter, + email_authentication_per_requester: KeyedRateLimiter, + email_authentication_per_email: KeyedRateLimiter, + email_authentication_emails_per_session: KeyedRateLimiter, + email_authentication_attempt_per_session: KeyedRateLimiter, +} + +impl LimiterInner { + fn new(config: &RateLimitingConfig) -> Option { + Some(Self { + account_recovery_per_requester: RateLimiter::keyed( + config.account_recovery.per_ip.to_quota()?, + ), + account_recovery_per_email: RateLimiter::keyed( + config.account_recovery.per_address.to_quota()?, + ), + password_check_for_requester: RateLimiter::keyed(config.login.per_ip.to_quota()?), + password_check_for_user: RateLimiter::keyed(config.login.per_account.to_quota()?), + registration_per_requester: RateLimiter::keyed(config.registration.to_quota()?), + email_authentication_per_email: RateLimiter::keyed( + config.email_authentication.per_address.to_quota()?, + ), + email_authentication_per_requester: RateLimiter::keyed( + config.email_authentication.per_ip.to_quota()?, + ), + email_authentication_emails_per_session: RateLimiter::keyed( + config.email_authentication.emails_per_session.to_quota()?, + ), + email_authentication_attempt_per_session: RateLimiter::keyed( + config.email_authentication.attempt_per_session.to_quota()?, + ), + }) + } +} + +impl Limiter { + /// Creates a new `Limiter` based on a 
`RateLimitingConfig`. + /// + /// If the config is not valid, returns `None`. + /// (This should not happen if the config was validated, though.) + #[must_use] + pub fn new(config: &RateLimitingConfig) -> Option { + Some(Self { + inner: Arc::new(LimiterInner::new(config)?), + }) + } + + /// Start the rate limiter housekeeping task + /// + /// This task will periodically remove old entries from the rate limiters, + /// to make sure we don't build up a huge number of entries in memory. + pub fn start(&self) { + // Spawn a task that will periodically clean the rate limiters + let this = self.clone(); + tokio::spawn(async move { + // Run the task every minute + let mut interval = tokio::time::interval(Duration::from_secs(60)); + interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + + loop { + // Call the retain_recent method on each rate limiter + this.inner.account_recovery_per_email.retain_recent(); + this.inner.account_recovery_per_requester.retain_recent(); + this.inner.password_check_for_requester.retain_recent(); + this.inner.password_check_for_user.retain_recent(); + this.inner.registration_per_requester.retain_recent(); + this.inner.email_authentication_per_email.retain_recent(); + this.inner + .email_authentication_per_requester + .retain_recent(); + this.inner + .email_authentication_emails_per_session + .retain_recent(); + this.inner + .email_authentication_attempt_per_session + .retain_recent(); + + interval.tick().await; + } + }); + } + + /// Check if an account recovery can be performed + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited. 
+ pub fn check_account_recovery( + &self, + requester: RequesterFingerprint, + email_address: &str, + ) -> Result<(), AccountRecoveryLimitedError> { + self.inner + .account_recovery_per_requester + .check_key(&requester) + .map_err(|_| AccountRecoveryLimitedError::Requester(requester))?; + + // Convert to lowercase to prevent bypassing the limit by enumerating different + // case variations. + // A case-folding transformation may be more proper. + let canonical_email = email_address.to_lowercase(); + self.inner + .account_recovery_per_email + .check_key(&canonical_email) + .map_err(|_| AccountRecoveryLimitedError::Email(canonical_email))?; + + Ok(()) + } + + /// Check if a password check can be performed + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited + pub fn check_password( + &self, + key: RequesterFingerprint, + user: &User, + ) -> Result<(), PasswordCheckLimitedError> { + self.inner + .password_check_for_requester + .check_key(&key) + .map_err(|_| PasswordCheckLimitedError::Requester(key))?; + + self.inner + .password_check_for_user + .check_key(&user.id) + .map_err(|_| PasswordCheckLimitedError::User(user.id))?; + + Ok(()) + } + + /// Check if an account registration can be performed + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited. + pub fn check_registration( + &self, + requester: RequesterFingerprint, + ) -> Result<(), RegistrationLimitedError> { + self.inner + .registration_per_requester + .check_key(&requester) + .map_err(|_| RegistrationLimitedError::Requester(requester))?; + + Ok(()) + } + + /// Check if an email can be sent to the address for an email + /// authentication session + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited. 
+ pub fn check_email_authentication_email( + &self, + requester: RequesterFingerprint, + email: &str, + ) -> Result<(), EmailAuthenticationLimitedError> { + self.inner + .email_authentication_per_requester + .check_key(&requester) + .map_err(|_| EmailAuthenticationLimitedError::Requester(requester))?; + + // Convert to lowercase to prevent bypassing the limit by enumerating different + // case variations. + // A case-folding transformation may be more proper. + let canonical_email = email.to_lowercase(); + self.inner + .email_authentication_per_email + .check_key(&canonical_email) + .map_err(|_| EmailAuthenticationLimitedError::Email(email.to_owned()))?; + Ok(()) + } + + /// Check if an attempt can be done on an email authentication session + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited. + pub fn check_email_authentication_attempt( + &self, + authentication: &UserEmailAuthentication, + ) -> Result<(), EmailAuthenticationLimitedError> { + self.inner + .email_authentication_attempt_per_session + .check_key(&authentication.id) + .map_err(|_| EmailAuthenticationLimitedError::Authentication(authentication.id)) + } + + /// Check if a new authentication code can be sent for an email + /// authentication session + /// + /// # Errors + /// + /// Returns an error if the operation is rate limited. 
+ pub fn check_email_authentication_send_code( + &self, + requester: RequesterFingerprint, + authentication: &UserEmailAuthentication, + ) -> Result<(), EmailAuthenticationLimitedError> { + self.check_email_authentication_email(requester, &authentication.email)?; + self.inner + .email_authentication_emails_per_session + .check_key(&authentication.id) + .map_err(|_| EmailAuthenticationLimitedError::Authentication(authentication.id)) + } +} + +#[cfg(test)] +mod tests { + use mas_data_model::{Clock, User, clock::MockClock}; + use rand::SeedableRng; + + use super::*; + + #[test] + fn test_password_check_limiter() { + let now = MockClock::default().now(); + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + + let limiter = Limiter::new(&RateLimitingConfig::default()).unwrap(); + + // Let's create a lot of requesters to test account-level rate limiting + let requesters: [_; 768] = (0..=255) + .flat_map(|a| (0..3).map(move |b| RequesterFingerprint::new([a, a, b, b].into()))) + .collect::>() + .try_into() + .unwrap(); + + let alice = User { + id: Ulid::from_datetime_with_source(now.into(), &mut rng), + username: "alice".to_owned(), + sub: "123-456".to_owned(), + created_at: now, + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: true, + }; + + let bob = User { + id: Ulid::from_datetime_with_source(now.into(), &mut rng), + username: "bob".to_owned(), + sub: "123-456".to_owned(), + created_at: now, + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: true, + }; + + // Three times the same IP address should be allowed + assert!(limiter.check_password(requesters[0], &alice).is_ok()); + assert!(limiter.check_password(requesters[0], &alice).is_ok()); + assert!(limiter.check_password(requesters[0], &alice).is_ok()); + + // But the fourth time should be rejected + assert!(limiter.check_password(requesters[0], &alice).is_err()); + // Using another user should also be rejected + 
assert!(limiter.check_password(requesters[0], &bob).is_err()); + + // Using a different IP address should be allowed, the account isn't locked yet + assert!(limiter.check_password(requesters[1], &alice).is_ok()); + + // At this point, we consumed 4 cells out of 1800 on alice, let's distribute the + // requests with other IPs so that we get rate-limited on the account-level + for requester in requesters.iter().skip(2).take(598) { + assert!(limiter.check_password(*requester, &alice).is_ok()); + assert!(limiter.check_password(*requester, &alice).is_ok()); + assert!(limiter.check_password(*requester, &alice).is_ok()); + assert!(limiter.check_password(*requester, &alice).is_err()); + } + + // We now have consumed 4+598*3 = 1798 cells on the account, so we should be + // rejected soon + assert!(limiter.check_password(requesters[600], &alice).is_ok()); + assert!(limiter.check_password(requesters[601], &alice).is_ok()); + assert!(limiter.check_password(requesters[602], &alice).is_err()); + + // The other account isn't rate-limited + assert!(limiter.check_password(requesters[603], &bob).is_ok()); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/session.rs b/matrix-authentication-service/crates/handlers/src/session.rs new file mode 100644 index 00000000..aa3836a2 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/session.rs @@ -0,0 +1,167 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Utilities for showing proposer HTML fallbacks when the user is logged out, +//! 
locked or deactivated + +use axum::response::{Html, IntoResponse as _, Response}; +use mas_axum_utils::{SessionInfoExt, cookies::CookieJar, csrf::CsrfExt}; +use mas_data_model::{BrowserSession, Clock, User}; +use mas_i18n::DataLocale; +use mas_policy::model::SessionCounts; +use mas_storage::{ + BoxRepository, RepositoryError, compat::CompatSessionFilter, oauth2::OAuth2SessionFilter, + personal::PersonalSessionFilter, +}; +use mas_templates::{AccountInactiveContext, TemplateContext, Templates}; +use rand::RngCore; +use thiserror::Error; + +#[derive(Debug, Error)] +#[error(transparent)] +pub enum SessionLoadError { + Template(#[from] mas_templates::TemplateError), + Repository(#[from] RepositoryError), +} + +#[allow(clippy::large_enum_variant)] +pub enum SessionOrFallback { + MaybeSession { + cookie_jar: CookieJar, + maybe_session: Option, + }, + Fallback { + response: Response, + }, +} + +/// Load a session from the cookie jar, or fall back to an HTML error page if +/// the account is locked, deactivated or logged out +pub async fn load_session_or_fallback( + cookie_jar: CookieJar, + clock: &impl Clock, + rng: impl RngCore, + templates: &Templates, + locale: &DataLocale, + repo: &mut BoxRepository, +) -> Result { + let (session_info, cookie_jar) = cookie_jar.session_info(); + let Some(session_id) = session_info.current_session_id() else { + return Ok(SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session: None, + }); + }; + + let Some(session) = repo.browser_session().lookup(session_id).await? else { + // We looked up the session, but it was not found. 
Still update the cookie + let session_info = session_info.mark_session_ended(); + let cookie_jar = cookie_jar.update_session_info(&session_info); + return Ok(SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session: None, + }); + }; + + if session.user.deactivated_at.is_some() { + // The account is deactivated, show the 'account deactivated' fallback + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(clock, rng); + let ctx = AccountInactiveContext::new(session.user) + .with_csrf(csrf_token.form_value()) + .with_language(locale.clone()); + let fallback = templates.render_account_deactivated(&ctx)?; + let response = (cookie_jar, Html(fallback)).into_response(); + return Ok(SessionOrFallback::Fallback { response }); + } + + if session.user.locked_at.is_some() { + // The account is locked, show the 'account locked' fallback + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(clock, rng); + let ctx = AccountInactiveContext::new(session.user) + .with_csrf(csrf_token.form_value()) + .with_language(locale.clone()); + let fallback = templates.render_account_locked(&ctx)?; + let response = (cookie_jar, Html(fallback)).into_response(); + return Ok(SessionOrFallback::Fallback { response }); + } + + if session.finished_at.is_some() { + // The session has finished, but the browser still has the cookie. This is + // likely a 'remote' logout, triggered either by an admin or from the + // user-management UI. In this case, we show the 'account logged out' + // fallback. 
+ let (csrf_token, cookie_jar) = cookie_jar.csrf_token(clock, rng); + let ctx = AccountInactiveContext::new(session.user) + .with_csrf(csrf_token.form_value()) + .with_language(locale.clone()); + let fallback = templates.render_account_logged_out(&ctx)?; + let response = (cookie_jar, Html(fallback)).into_response(); + return Ok(SessionOrFallback::Fallback { response }); + } + + Ok(SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session: Some(session), + }) +} + +/// Get a count of sessions for the given user, for the purposes of session +/// limiting. +/// +/// Includes: +/// - OAuth 2 sessions +/// - Compatibility sessions +/// - Personal sessions (unless owned by a different user) +/// +/// # Backstory +/// +/// Originally, we were only intending to count sessions with devices in this +/// result, because those are the entries that are expensive for Synapse and +/// also would not hinder use of deviceless clients (like Element Admin, an +/// admin dashboard). +/// +/// However, to do so, we would need to count only sessions including device +/// scopes. To do this efficiently, we'd need a partial index on sessions +/// including device scopes. +/// +/// It turns out that this can't be done cleanly (as we need to, in Postgres, +/// match scope lists where one of the scopes matches one of 2 known prefixes), +/// at least not without somewhat uncomfortable stored functions. +/// +/// So for simplicity's sake, we now count all sessions. +/// For practical use cases, it's not likely to make a noticeable difference +/// (and maybe it's good that there's an overall limit). +pub(crate) async fn count_user_sessions_for_limiting( + repo: &mut BoxRepository, + user: &User, +) -> Result { + let oauth2 = repo + .oauth2_session() + .count(OAuth2SessionFilter::new().active_only().for_user(user)) + .await? as u64; + + let compat = repo + .compat_session() + .count(CompatSessionFilter::new().active_only().for_user(user)) + .await? 
as u64; + + // Only include self-owned personal sessions, not administratively-owned ones + let personal = repo + .personal_session() + .count( + PersonalSessionFilter::new() + .active_only() + .for_actor_user(user) + .for_owner_user(user), + ) + .await? as u64; + + Ok(SessionCounts { + total: oauth2 + compat + personal, + oauth2, + compat, + personal, + }) +} diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-2.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-2.snap new file mode 100644 index 00000000..00b3c9ab --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-2.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$argon2id$v=19$m=19456,t=2,p=1$4aRFZH7bgRs24delZVap/Q$Y2SNOQuEfwWuBXflRnbJhqpksexRziQ9Wf9BatCuIVY diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-3.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-3.snap new file mode 100644 index 00000000..62a4cee9 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade-3.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$argon2id$v=19$m=19456,t=2,p=1$1Ke64U6Mrdl5imSjjFRU+g$yirg39x3QVVTxsV5OI4usyIaCw6IRxPl5Li3mQyNmN8 diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade.snap new file mode 100644 index 00000000..cf56edf4 --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hash_verify_and_upgrade.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$2b$10$1Mgv9BLlKUPw2H3LIWlseeWUiTWF2yZC/.TyzuC3bGuB9XacoEUu6 diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id-2.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id-2.snap new file mode 100644 index 00000000..b72eb99b --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id-2.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$argon2id$v=19$m=19456,t=2,p=1$1WdxAF1UChkYSTnJ6NDbKg$ajKAfwlUmkbxITSdh55j+Hvoxzppx20ArNUF44oV9Nk diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id.snap new file mode 100644 index 00000000..81780432 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_argon2id.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$argon2id$v=19$m=19456,t=2,p=1$eEi11xG8mIOZYxej+ckCaQ$eBeygPqiuImQAaFQOkE6oVkPfqxIGgnqpQd/MwW4YX4 diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt-2.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt-2.snap new file mode 100644 index 00000000..935f4b08 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt-2.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash 
+--- +$2b$10$mqjtwG6w3GawhuQQdwBCqOt0TQ0V4vGhB.tMuCZO8WL.ycBHkOLca diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt.snap new file mode 100644 index 00000000..381361e2 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_bcrypt.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$2b$10$c/EX8bTbEMfTn4oCvcQyBOR1zPyLmGzZ2pMXoElLASqv2qpq5X15i diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2-2.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2-2.snap new file mode 100644 index 00000000..e22f8183 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2-2.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$pbkdf2-sha256$i=600000,l=32$1WdxAF1UChkYSTnJ6NDbKg$uwgJSFAtjA082fY37K09Q5Hjbw3mBjFI/JLW9sw0F2A diff --git a/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2.snap b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2.snap new file mode 100644 index 00000000..ce725343 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/snapshots/mas_handlers__passwords__tests__hashing_pbkdf2.snap @@ -0,0 +1,5 @@ +--- +source: crates/handlers/src/passwords.rs +expression: hash +--- +$pbkdf2-sha256$i=600000,l=32$eEi11xG8mIOZYxej+ckCaQ$uyS+Ip4DieQ9S+m1EcT+vCtuiWpQ3TsDGPLY4mwkOxc diff --git a/matrix-authentication-service/crates/handlers/src/test_utils.rs b/matrix-authentication-service/crates/handlers/src/test_utils.rs new file mode 
100644 index 00000000..521a4848 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/test_utils.rs @@ -0,0 +1,887 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + convert::Infallible, + sync::{Arc, Mutex, RwLock}, + task::{Context, Poll}, +}; + +use axum::{ + body::{Bytes, HttpBody}, + extract::{FromRef, FromRequestParts}, + response::{IntoResponse, IntoResponseParts}, +}; +use chrono::Duration; +use cookie_store::{CookieStore, RawCookie}; +use futures_util::future::BoxFuture; +use headers::{Authorization, ContentType, HeaderMapExt, HeaderName, HeaderValue}; +use hyper::{ + Request, Response, StatusCode, + header::{CONTENT_TYPE, COOKIE, SET_COOKIE}, +}; +use mas_axum_utils::{ + ErrorWrapper, + cookies::{CookieJar, CookieManager}, +}; +use mas_config::RateLimitingConfig; +use mas_data_model::{AppVersion, BoxClock, BoxRng, SiteConfig, clock::MockClock}; +use mas_email::{MailTransport, Mailer}; +use mas_i18n::Translator; +use mas_keystore::{Encrypter, JsonWebKey, JsonWebKeySet, Keystore, PrivateKey}; +use mas_matrix::{HomeserverConnection, MockHomeserverConnection}; +use mas_policy::{InstantiateError, Policy, PolicyFactory}; +use mas_router::{SimpleRoute, UrlBuilder}; +use mas_storage::{BoxRepository, BoxRepositoryFactory, RepositoryError, RepositoryFactory}; +use mas_storage_pg::PgRepositoryFactory; +use mas_tasks::QueueWorker; +use mas_templates::{SiteConfigExt, Templates}; +use oauth2_types::{registration::ClientRegistrationResponse, requests::AccessTokenResponse}; +use rand::SeedableRng; +use rand_chacha::ChaChaRng; +use serde::{Serialize, de::DeserializeOwned}; +use sqlx::PgPool; +use tokio_util::{ + sync::{CancellationToken, DropGuard}, + task::TaskTracker, +}; +use tower::{Layer, Service, ServiceExt}; +use url::Url; + +use 
crate::{ + ActivityTracker, BoundActivityTracker, Limiter, RequesterFingerprint, graphql, + passwords::{Hasher, PasswordManager}, + upstream_oauth2::cache::MetadataCache, +}; + +/// Setup rustcrypto and tracing for tests. +#[allow(unused_must_use)] +pub(crate) fn setup() { + rustls::crypto::aws_lc_rs::default_provider().install_default(); + + tracing_subscriber::fmt() + .with_max_level(tracing::Level::INFO) + .with_test_writer() + .try_init(); +} + +pub(crate) async fn policy_factory( + server_name: &str, + data: serde_json::Value, +) -> Result, anyhow::Error> { + let workspace_root = camino::Utf8Path::new(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join(".."); + + let file = tokio::fs::File::open(workspace_root.join("policies").join("policy.wasm")).await?; + + let entrypoints = mas_policy::Entrypoints { + register: "register/violation".to_owned(), + client_registration: "client_registration/violation".to_owned(), + authorization_grant: "authorization_grant/violation".to_owned(), + compat_login: "compat_login/violation".to_owned(), + email: "email/violation".to_owned(), + }; + + let data = mas_policy::Data::new(server_name.to_owned(), None).with_rest(data); + + let policy_factory = PolicyFactory::load(file, data, entrypoints).await?; + let policy_factory = Arc::new(policy_factory); + Ok(policy_factory) +} + +#[derive(Clone)] +pub(crate) struct TestState { + pub repository_factory: PgRepositoryFactory, + pub templates: Templates, + pub key_store: Keystore, + pub cookie_manager: CookieManager, + pub metadata_cache: MetadataCache, + pub encrypter: Encrypter, + pub url_builder: UrlBuilder, + pub homeserver_connection: Arc, + pub policy_factory: Arc, + pub graphql_schema: graphql::Schema, + pub password_manager: PasswordManager, + pub site_config: SiteConfig, + pub activity_tracker: ActivityTracker, + pub limiter: Limiter, + pub clock: Arc, + pub rng: Arc>, + pub http_client: reqwest::Client, + pub task_tracker: TaskTracker, + queue_worker: Arc>, + + 
#[allow(dead_code)] // It is used, as it will cancel the CancellationToken when dropped + cancellation_drop_guard: Arc, +} + +fn workspace_root() -> camino::Utf8PathBuf { + camino::Utf8Path::new(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join("..") + .canonicalize_utf8() + .unwrap() +} + +pub fn test_site_config() -> SiteConfig { + SiteConfig { + access_token_ttl: Duration::try_minutes(5).unwrap(), + compat_token_ttl: Duration::try_minutes(5).unwrap(), + server_name: "example.com".to_owned(), + policy_uri: Some("https://example.com/policy".parse().unwrap()), + tos_uri: Some("https://example.com/tos".parse().unwrap()), + imprint: None, + password_login_enabled: true, + password_registration_enabled: true, + registration_token_required: false, + email_change_allowed: true, + displayname_change_allowed: true, + password_change_allowed: true, + password_registration_email_required: true, + account_recovery_allowed: true, + account_deactivation_allowed: true, + captcha: None, + minimum_password_complexity: 1, + session_expiration: None, + login_with_email_allowed: true, + plan_management_iframe_uri: None, + session_limit: None, + } +} + +impl TestState { + /// Create a new test state from the given database pool + pub async fn from_pool(pool: PgPool) -> Result { + Self::from_pool_with_site_config(pool, test_site_config()).await + } + + /// Create a new test state from the given database pool and site config + pub async fn from_pool_with_site_config( + pool: PgPool, + site_config: SiteConfig, + ) -> Result { + let workspace_root = workspace_root(); + + let task_tracker = TaskTracker::new(); + let shutdown_token = CancellationToken::new(); + + let url_builder = UrlBuilder::new("https://example.com/".parse()?, None, None); + + let templates = Templates::load( + workspace_root.join("templates"), + url_builder.clone(), + Some(workspace_root.join("frontend/dist/manifest.json")), + workspace_root.join("translations"), + site_config.templates_branding(), + 
site_config.templates_features(), + // Strict mode in testing + true, + ) + .await?; + + let http_client = mas_http::reqwest_client(); + + // TODO: add more test keys to the store + let rsa = + PrivateKey::load_pem(include_str!("../../keystore/tests/keys/rsa.pkcs1.pem")).unwrap(); + let rsa = JsonWebKey::new(rsa).with_kid("test-rsa"); + + let jwks = JsonWebKeySet::new(vec![rsa]); + let key_store = Keystore::new(jwks); + + let encrypter = Encrypter::new(&[0x42; 32]); + let cookie_manager = CookieManager::derive_from(url_builder.http_base(), &[0x42; 32]); + + let metadata_cache = MetadataCache::new(); + + let password_manager = if site_config.password_login_enabled { + PasswordManager::new( + site_config.minimum_password_complexity, + [(1, Hasher::argon2id(None, false))], + )? + } else { + PasswordManager::disabled() + }; + + let policy_factory = + policy_factory(&site_config.server_name, serde_json::json!({})).await?; + + let homeserver_connection = + Arc::new(MockHomeserverConnection::new(&site_config.server_name)); + + let clock = Arc::new(MockClock::default()); + let rng = Arc::new(Mutex::new(ChaChaRng::seed_from_u64(42))); + + let limiter = Limiter::new(&RateLimitingConfig::default()).unwrap(); + + let graphql_state = TestGraphQLState { + repository_factory: PgRepositoryFactory::new(pool.clone()).boxed(), + policy_factory: Arc::clone(&policy_factory), + homeserver_connection: Arc::clone(&homeserver_connection), + site_config: site_config.clone(), + rng: Arc::clone(&rng), + clock: Arc::clone(&clock), + password_manager: password_manager.clone(), + url_builder: url_builder.clone(), + limiter: limiter.clone(), + }; + let state: crate::graphql::BoxState = Box::new(graphql_state); + + let graphql_schema = graphql::schema_builder().data(state).finish(); + + let activity_tracker = ActivityTracker::new( + PgRepositoryFactory::new(pool.clone()).boxed(), + std::time::Duration::from_secs(60), + &task_tracker, + shutdown_token.child_token(), + ); + + let mailer = 
Mailer::new( + templates.clone(), + MailTransport::blackhole(), + "hello@example.com".parse().unwrap(), + "hello@example.com".parse().unwrap(), + ); + + let queue_worker = mas_tasks::init( + PgRepositoryFactory::new(pool.clone()), + Arc::clone(&clock), + &mailer, + homeserver_connection.clone(), + url_builder.clone(), + &site_config, + shutdown_token.child_token(), + ) + .await + .unwrap(); + + let queue_worker = Arc::new(tokio::sync::Mutex::new(queue_worker)); + + Ok(Self { + repository_factory: PgRepositoryFactory::new(pool), + templates, + key_store, + cookie_manager, + metadata_cache, + encrypter, + url_builder, + homeserver_connection, + policy_factory, + graphql_schema, + password_manager, + site_config, + activity_tracker, + limiter, + clock, + rng, + http_client, + task_tracker, + queue_worker, + cancellation_drop_guard: Arc::new(shutdown_token.drop_guard()), + }) + } + + /// Run all the available jobs in the queue. + /// + /// Panics if it fails to run the jobs (but not on job failures!) + pub async fn run_jobs_in_queue(&self) { + let mut queue = self.queue_worker.lock().await; + queue.process_all_jobs_in_tests().await.unwrap(); + } + + /// Reset the test utils to a fresh state, with the same configuration. 
+ pub async fn reset(self) -> Self { + let site_config = self.site_config.clone(); + let pool = self.repository_factory.pool(); + let task_tracker = self.task_tracker.clone(); + + // This should trigger the cancellation drop guard + drop(self); + + // Wait for tasks to complete + task_tracker.close(); + task_tracker.wait().await; + + Self::from_pool_with_site_config(pool, site_config) + .await + .unwrap() + } + + pub async fn request(&self, request: Request) -> Response + where + B: HttpBody + Send + 'static, + ::Error: std::error::Error + Send + Sync, + B::Error: std::error::Error + Send + Sync, + B::Data: Send, + { + let app = crate::healthcheck_router() + .merge(crate::discovery_router()) + .merge(crate::api_router()) + .merge(crate::compat_router(self.templates.clone())) + .merge(crate::human_router(self.templates.clone())) + // We enable undocumented_oauth2_access for the tests, as it is easier to query the API + // with it + .merge(crate::graphql_router(false, true)) + .merge(crate::admin_api_router().1) + .with_state(self.clone()) + .into_service(); + + let Ok(mut service) = app.ready_oneshot().await; + let Ok(response) = service.call(request).await; + + let (parts, body) = response.into_parts(); + + // This could actually fail, but do we really care about that? 
+ let body = axum::body::to_bytes(body, usize::MAX) + .await + .expect("Failed to read response body"); + let body = std::str::from_utf8(&body) + .expect("Response body is not valid UTF-8") + .to_owned(); + + Response::from_parts(parts, body) + } + + /// Get a token with the given scope + pub async fn token_with_scope(&mut self, scope: &str) -> String { + // Provision a client + let request = + Request::post(mas_router::OAuth2RegistrationEndpoint::PATH).json(serde_json::json!({ + "client_uri": "https://example.com/", + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["client_credentials"], + })); + let response = self.request(request).await; + response.assert_status(StatusCode::CREATED); + let response: ClientRegistrationResponse = response.json(); + let client_id = response.client_id; + let client_secret = response.client_secret.expect("to have a client secret"); + + // Make the client admin + let state = { + let mut state = self.clone(); + state.policy_factory = policy_factory( + "example.com", + serde_json::json!({ + "admin_clients": [client_id], + }), + ) + .await + .unwrap(); + state + }; + + // Ask for a token with the admin scope + let request = + Request::post(mas_router::OAuth2TokenEndpoint::PATH).form(serde_json::json!({ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + "scope": scope, + })); + + let response = state.request(request).await; + response.assert_status(StatusCode::OK); + let AccessTokenResponse { access_token, .. } = response.json(); + + access_token + } + + pub async fn repository(&self) -> Result { + self.repository_factory.create().await + } + + /// Returns a new random number generator. + /// + /// # Panics + /// + /// Panics if the RNG is already locked. 
+ pub fn rng(&self) -> ChaChaRng { + let mut parent_rng = self.rng.try_lock().expect("Failed to lock RNG"); + ChaChaRng::from_rng(&mut *parent_rng).unwrap() + } + + /// Do a call to the userinfo endpoint to check if the given token is valid. + /// Returns true if the token is valid. + /// + /// # Panics + /// + /// Panics if the response status code is not 200 or 401. + pub async fn is_access_token_valid(&self, token: &str) -> bool { + let request = Request::get(mas_router::OidcUserinfo::PATH) + .bearer(token) + .empty(); + + let response = self.request(request).await; + + match response.status() { + StatusCode::OK => true, + StatusCode::UNAUTHORIZED => false, + _ => panic!("Unexpected status code: {}", response.status()), + } + } + + /// Get an empty cookie jar + pub fn cookie_jar(&self) -> CookieJar { + self.cookie_manager.cookie_jar() + } +} + +struct TestGraphQLState { + repository_factory: BoxRepositoryFactory, + homeserver_connection: Arc, + site_config: SiteConfig, + policy_factory: Arc, + clock: Arc, + rng: Arc>, + password_manager: PasswordManager, + url_builder: UrlBuilder, + limiter: Limiter, +} + +#[async_trait::async_trait] +impl graphql::State for TestGraphQLState { + async fn repository(&self) -> Result { + self.repository_factory.create().await + } + + async fn policy(&self) -> Result { + self.policy_factory.instantiate().await + } + + fn password_manager(&self) -> PasswordManager { + self.password_manager.clone() + } + + fn homeserver_connection(&self) -> &dyn HomeserverConnection { + &self.homeserver_connection + } + + fn url_builder(&self) -> &UrlBuilder { + &self.url_builder + } + + fn clock(&self) -> BoxClock { + Box::new(self.clock.clone()) + } + + fn site_config(&self) -> &SiteConfig { + &self.site_config + } + + fn limiter(&self) -> &Limiter { + &self.limiter + } + + fn rng(&self) -> BoxRng { + let mut parent_rng = self.rng.lock().expect("Failed to lock RNG"); + let rng = ChaChaRng::from_rng(&mut *parent_rng).expect("Failed to seed RNG"); + 
Box::new(rng) + } +} + +impl FromRef for PgPool { + fn from_ref(input: &TestState) -> Self { + input.repository_factory.pool() + } +} + +impl FromRef for BoxRepositoryFactory { + fn from_ref(input: &TestState) -> Self { + input.repository_factory.clone().boxed() + } +} + +impl FromRef for graphql::Schema { + fn from_ref(input: &TestState) -> Self { + input.graphql_schema.clone() + } +} + +impl FromRef for Templates { + fn from_ref(input: &TestState) -> Self { + input.templates.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &TestState) -> Self { + input.templates.translator() + } +} + +impl FromRef for Keystore { + fn from_ref(input: &TestState) -> Self { + input.key_store.clone() + } +} + +impl FromRef for Encrypter { + fn from_ref(input: &TestState) -> Self { + input.encrypter.clone() + } +} + +impl FromRef for UrlBuilder { + fn from_ref(input: &TestState) -> Self { + input.url_builder.clone() + } +} + +impl FromRef for PasswordManager { + fn from_ref(input: &TestState) -> Self { + input.password_manager.clone() + } +} + +impl FromRef for CookieManager { + fn from_ref(input: &TestState) -> Self { + input.cookie_manager.clone() + } +} + +impl FromRef for MetadataCache { + fn from_ref(input: &TestState) -> Self { + input.metadata_cache.clone() + } +} + +impl FromRef for SiteConfig { + fn from_ref(input: &TestState) -> Self { + input.site_config.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &TestState) -> Self { + input.policy_factory.clone() + } +} + +impl FromRef for Arc { + fn from_ref(input: &TestState) -> Self { + input.homeserver_connection.clone() + } +} + +impl FromRef for Limiter { + fn from_ref(input: &TestState) -> Self { + input.limiter.clone() + } +} + +impl FromRef for reqwest::Client { + fn from_ref(input: &TestState) -> Self { + input.http_client.clone() + } +} + +impl FromRef for AppVersion { + fn from_ref(_input: &TestState) -> Self { + AppVersion("v0.0.0-test") + } +} + +impl FromRequestParts for ActivityTracker { + 
type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + Ok(state.activity_tracker.clone()) + } +} + +impl FromRequestParts for BoundActivityTracker { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + let ip = None; + Ok(state.activity_tracker.clone().bind(ip)) + } +} + +impl FromRequestParts for RequesterFingerprint { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + _state: &TestState, + ) -> Result { + Ok(RequesterFingerprint::EMPTY) + } +} + +impl FromRequestParts for BoxClock { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + Ok(Box::new(state.clock.clone())) + } +} + +impl FromRequestParts for BoxRng { + type Rejection = Infallible; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + let mut parent_rng = state.rng.lock().expect("Failed to lock RNG"); + let rng = ChaChaRng::from_rng(&mut *parent_rng).expect("Failed to seed RNG"); + Ok(Box::new(rng)) + } +} + +impl FromRequestParts for BoxRepository { + type Rejection = ErrorWrapper; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + let repo = state.repository_factory.create().await?; + Ok(repo) + } +} + +impl FromRequestParts for Policy { + type Rejection = ErrorWrapper; + + async fn from_request_parts( + _parts: &mut axum::http::request::Parts, + state: &TestState, + ) -> Result { + let policy = state.policy_factory.instantiate().await?; + Ok(policy) + } +} + +pub(crate) trait RequestBuilderExt { + /// Builds the request with the given JSON value as body. 
+ fn json(self, body: T) -> hyper::Request; + + /// Builds the request with the given form value as body. + fn form(self, body: T) -> hyper::Request; + + /// Sets the request Authorization header to the given bearer token. + fn bearer(self, token: &str) -> Self; + + /// Sets the request Authorization header to the given basic auth + /// credentials. + fn basic_auth(self, username: &str, password: &str) -> Self; + + /// Builds the request with an empty body. + fn empty(self) -> hyper::Request; +} + +impl RequestBuilderExt for hyper::http::request::Builder { + fn json(mut self, body: T) -> hyper::Request { + self.headers_mut() + .unwrap() + .typed_insert(ContentType::json()); + + self.body(serde_json::to_string(&body).unwrap()).unwrap() + } + + fn form(mut self, body: T) -> hyper::Request { + self.headers_mut() + .unwrap() + .typed_insert(ContentType::form_url_encoded()); + + self.body(serde_urlencoded::to_string(&body).unwrap()) + .unwrap() + } + + fn bearer(mut self, token: &str) -> Self { + self.headers_mut() + .unwrap() + .typed_insert(Authorization::bearer(token).unwrap()); + self + } + + fn basic_auth(mut self, username: &str, password: &str) -> Self { + self.headers_mut() + .unwrap() + .typed_insert(Authorization::basic(username, password)); + self + } + + fn empty(self) -> hyper::Request { + self.body(String::new()).unwrap() + } +} + +pub(crate) trait ResponseExt { + /// Asserts that the response has the given status code. + /// + /// # Panics + /// + /// Panics if the response has a different status code. + fn assert_status(&self, status: StatusCode); + + /// Asserts that the response has the given header value. + /// + /// # Panics + /// + /// Panics if the response does not have the given header or if the header + /// value does not match. + fn assert_header_value(&self, header: HeaderName, value: &str); + + /// Get the response body as JSON. 
+ /// + /// # Panics + /// + /// Panics if the response is missing the `Content-Type: application/json`, + /// or if the body is not valid JSON. + fn json(&self) -> T; +} + +impl ResponseExt for Response { + #[track_caller] + fn assert_status(&self, status: StatusCode) { + assert_eq!( + self.status(), + status, + "HTTP status code mismatch: got {}, expected {}. Body: {}", + self.status(), + status, + self.body() + ); + } + + #[track_caller] + fn assert_header_value(&self, header: HeaderName, value: &str) { + let actual_value = self + .headers() + .get(&header) + .unwrap_or_else(|| panic!("Missing header {header}")); + + assert_eq!( + actual_value, + value, + "Header mismatch: got {:?}, expected {:?}", + self.headers().get(header), + value + ); + } + + #[track_caller] + fn json(&self) -> T { + self.assert_header_value(CONTENT_TYPE, "application/json"); + serde_json::from_str(self.body()).expect("JSON deserialization failed") + } +} + +/// A helper for storing and retrieving cookies in tests. +#[derive(Clone, Debug, Default)] +pub struct CookieHelper { + store: Arc>, +} + +impl CookieHelper { + pub fn new() -> Self { + Self::default() + } + + /// Inject the cookies from the store into the request. + pub fn with_cookies(&self, mut request: Request) -> Request { + let url = Url::options() + .base_url(Some(&"https://example.com/".parse().unwrap())) + .parse(&request.uri().to_string()) + .expect("Failed to parse URL"); + + let store = self.store.read().unwrap(); + let value = store + .get_request_values(&url) + .map(|(name, value)| format!("{name}={value}")) + .collect::>() + .join("; "); + + request.headers_mut().insert( + COOKIE, + HeaderValue::from_str(&value).expect("Invalid cookie value"), + ); + request + } + + /// Save the cookies from the response into the store. 
+ pub fn save_cookies(&self, response: &Response) { + let url = "https://example.com/".parse().unwrap(); + let mut store = self.store.write().unwrap(); + store.store_response_cookies( + response + .headers() + .get_all(SET_COOKIE) + .iter() + .map(|set_cookie| { + RawCookie::parse( + set_cookie + .to_str() + .expect("Invalid set-cookie header") + .to_owned(), + ) + .expect("Invalid set-cookie header") + }), + &url, + ); + } + + pub fn import(&self, res: impl IntoResponseParts) { + let response = (res, "").into_response(); + self.save_cookies(&response); + } +} + +impl Layer for CookieHelper { + type Service = CookieStoreService; + + fn layer(&self, inner: S) -> Self::Service { + CookieStoreService { + helper: self.clone(), + inner, + } + } +} + +/// A middleware that stores and retrieves cookies. +pub struct CookieStoreService { + helper: CookieHelper, + inner: S, +} + +impl Service> for CookieStoreService +where + S: Service, Response = Response> + Send, + S::Future: Send + 'static, +{ + type Response = S::Response; + type Error = S::Error; + type Future = BoxFuture<'static, Result>; + + fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, request: Request) -> Self::Future { + let req = self.helper.with_cookies(request); + let inner = self.inner.call(req); + let helper = self.helper.clone(); + Box::pin(async move { + let response: Response<_> = inner.await?; + helper.save_cookies(&response); + Ok(response) + }) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/authorize.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/authorize.rs new file mode 100644 index 00000000..8749f3c3 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/authorize.rs @@ -0,0 +1,149 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + extract::{Path, State}, + response::{IntoResponse, Redirect}, +}; +use axum_extra::extract::Query; +use hyper::StatusCode; +use mas_axum_utils::{GenericError, InternalError, cookies::CookieJar}; +use mas_data_model::{BoxClock, BoxRng, UpstreamOAuthProvider}; +use mas_oidc_client::requests::authorization_code::AuthorizationRequestData; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::{ + BoxRepository, + upstream_oauth2::{UpstreamOAuthProviderRepository, UpstreamOAuthSessionRepository}, +}; +use thiserror::Error; +use ulid::Ulid; + +use super::{UpstreamSessionsCookie, cache::LazyProviderInfos}; +use crate::{ + impl_from_error_for_route, upstream_oauth2::cache::MetadataCache, + views::shared::OptionalPostAuthAction, +}; + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error("Provider not found")] + ProviderNotFound, + + #[error(transparent)] + Internal(Box), +} + +impl_from_error_for_route!(mas_oidc_client::error::DiscoveryError); +impl_from_error_for_route!(mas_oidc_client::error::AuthorizationError); +impl_from_error_for_route!(mas_storage::RepositoryError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + match self { + e @ Self::ProviderNotFound => { + GenericError::new(StatusCode::NOT_FOUND, e).into_response() + } + Self::Internal(e) => InternalError::new(e).into_response(), + } + } +} + +#[tracing::instrument( + name = "handlers.upstream_oauth2.authorize.get", + fields(upstream_oauth_provider.id = %provider_id), + skip_all, +)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + State(metadata_cache): State, + mut repo: BoxRepository, + State(url_builder): State, + State(http_client): State, + cookie_jar: CookieJar, + Path(provider_id): Path, + Query(query): Query, +) -> Result { + let provider = repo + 
.upstream_oauth_provider() + .lookup(provider_id) + .await? + .filter(UpstreamOAuthProvider::enabled) + .ok_or(RouteError::ProviderNotFound)?; + + // First, discover the provider + // This is done lazyly according to provider.discovery_mode and the various + // endpoint overrides + let mut lazy_metadata = LazyProviderInfos::new(&metadata_cache, &provider, &http_client); + lazy_metadata.maybe_discover().await?; + + let redirect_uri = url_builder.upstream_oauth_callback(provider.id); + + let mut data = AuthorizationRequestData::new( + provider.client_id.clone(), + provider.scope.clone(), + redirect_uri, + ); + + if let Some(response_mode) = provider.response_mode { + data = data.with_response_mode(response_mode.into()); + } + + // Forward the raw login hint upstream for the provider to handle however it + // sees fit + if provider.forward_login_hint + && let Some(PostAuthAction::ContinueAuthorizationGrant { id }) = &query.post_auth_action + && let Some(login_hint) = repo + .oauth2_authorization_grant() + .lookup(*id) + .await? + .and_then(|grant| grant.login_hint) + { + data = data.with_login_hint(login_hint); + } + + let data = if let Some(methods) = lazy_metadata.pkce_methods().await? 
{ + data.with_code_challenge_methods_supported(methods) + } else { + data + }; + + // Build an authorization request for it + let (mut url, data) = mas_oidc_client::requests::authorization_code::build_authorization_url( + lazy_metadata.authorization_endpoint().await?.clone(), + data, + &mut rng, + )?; + + // We do that in a block because params borrows url mutably + { + // Add any additional parameters to the query + let mut params = url.query_pairs_mut(); + for (key, value) in &provider.additional_authorization_parameters { + params.append_pair(key, value); + } + } + + let session = repo + .upstream_oauth_session() + .add( + &mut rng, + &clock, + &provider, + data.state.clone(), + data.code_challenge_verifier, + data.nonce, + ) + .await?; + + let cookie_jar = UpstreamSessionsCookie::load(&cookie_jar) + .add(session.id, provider.id, data.state, query.post_auth_action) + .save(cookie_jar, &clock); + + repo.save().await?; + + Ok((cookie_jar, Redirect::temporary(url.as_str()))) +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/backchannel_logout.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/backchannel_logout.rs new file mode 100644 index 00000000..254a4d01 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/backchannel_logout.rs @@ -0,0 +1,319 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::{HashMap, HashSet}; + +use axum::{ + Form, Json, + extract::{Path, State, rejection::FormRejection}, + response::IntoResponse, +}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::{ + BoxClock, BoxRng, UpstreamOAuthProvider, UpstreamOAuthProviderOnBackchannelLogout, +}; +use mas_jose::{ + claims::{self, Claim, TimeOptions}, + jwt::JwtDecodeError, +}; +use mas_oidc_client::{ + error::JwtVerificationError, + requests::jose::{JwtVerificationData, verify_signed_jwt}, +}; +use mas_storage::{ + BoxRepository, Pagination, + compat::CompatSessionFilter, + oauth2::OAuth2SessionFilter, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, + upstream_oauth2::UpstreamOAuthSessionFilter, + user::BrowserSessionFilter, +}; +use oauth2_types::errors::{ClientError, ClientErrorCode}; +use serde::Deserialize; +use serde_json::Value; +use thiserror::Error; +use ulid::Ulid; + +use crate::{MetadataCache, impl_from_error_for_route, upstream_oauth2::cache::LazyProviderInfos}; + +#[derive(Debug, Error)] +pub enum RouteError { + /// An internal error occurred. 
+ #[error(transparent)] + Internal(Box), + + /// Invalid request body + #[error(transparent)] + InvalidRequestBody(#[from] FormRejection), + + /// Logout token is not a JWT + #[error("failed to decode logout token")] + InvalidLogoutToken(#[from] JwtDecodeError), + + /// Logout token failed to be verified + #[error("failed to verify logout token")] + LogoutTokenVerification(#[from] JwtVerificationError), + + /// Logout token had invalid claims + #[error("invalid claims in logout token")] + InvalidLogoutTokenClaims(#[from] claims::ClaimError), + + /// Logout token has neither a sub nor a sid claim + #[error("logout token has neither a sub nor a sid claim")] + NoSubOrSidClaim, + + /// Provider not found + #[error("provider not found")] + ProviderNotFound, +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + + let response = match self { + e @ Self::Internal(_) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json( + ClientError::from(ClientErrorCode::ServerError).with_description(e.to_string()), + ), + ) + .into_response(), + + e @ (Self::InvalidLogoutToken(_) + | Self::LogoutTokenVerification(_) + | Self::InvalidRequestBody(_) + | Self::InvalidLogoutTokenClaims(_) + | Self::NoSubOrSidClaim) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidRequest) + .with_description(e.to_string()), + ), + ) + .into_response(), + + Self::ProviderNotFound => ( + StatusCode::NOT_FOUND, + Json( + ClientError::from(ClientErrorCode::InvalidRequest).with_description( + "Upstream OAuth provider not found, is the backchannel logout URI right?" 
+ .to_owned(), + ), + ), + ) + .into_response(), + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_oidc_client::error::DiscoveryError); +impl_from_error_for_route!(mas_oidc_client::error::JwksError); + +#[derive(Deserialize)] +pub(crate) struct BackchannelLogoutRequest { + logout_token: String, +} + +#[derive(Deserialize)] +struct LogoutTokenEvents { + #[allow(dead_code)] // We just want to check it deserializes + #[serde(rename = "http://schemas.openid.net/event/backchannel-logout")] + backchannel_logout: HashMap, +} + +const EVENTS: Claim = Claim::new("events"); + +#[tracing::instrument( + name = "handlers.upstream_oauth2.backchannel_logout.post", + fields(upstream_oauth_provider.id = %provider_id), + skip_all, +)] +pub(crate) async fn post( + clock: BoxClock, + mut rng: BoxRng, + mut repo: BoxRepository, + State(metadata_cache): State, + State(client): State, + Path(provider_id): Path, + request: Result, FormRejection>, +) -> Result { + let Form(request) = request?; + let provider = repo + .upstream_oauth_provider() + .lookup(provider_id) + .await? + .filter(UpstreamOAuthProvider::enabled) + .ok_or(RouteError::ProviderNotFound)?; + + let mut lazy_metadata = LazyProviderInfos::new(&metadata_cache, &provider, &client); + + let jwks = + mas_oidc_client::requests::jose::fetch_jwks(&client, lazy_metadata.jwks_uri().await?) + .await?; + + // Validate the logout token. The rules are defined in + // + // + // Upon receiving a logout request at the back-channel logout URI, the RP MUST + // validate the Logout Token as follows: + // + // 1. If the Logout Token is encrypted, decrypt it using the keys and + // algorithms that the Client specified during Registration that the OP was + // to use to encrypt ID Tokens. If ID Token encryption was negotiated with + // the OP at Registration time and the Logout Token is not encrypted, the RP + // SHOULD reject it. + // 2. 
Validate the Logout Token signature in the same way that an ID Token + // signature is validated, with the following refinements. + // 3. Validate the alg (algorithm) Header Parameter in the same way it is + // validated for ID Tokens. Like ID Tokens, selection of the algorithm used + // is governed by the id_token_signing_alg_values_supported Discovery + // parameter and the id_token_signed_response_alg Registration parameter + // when they are used; otherwise, the value SHOULD be the default of RS256. + // Additionally, an alg with the value none MUST NOT be used for Logout + // Tokens. + // 4. Validate the iss, aud, iat, and exp Claims in the same way they are + // validated in ID Tokens. + // 5. Verify that the Logout Token contains a sub Claim, a sid Claim, or both. + // 6. Verify that the Logout Token contains an events Claim whose value is JSON + // object containing the member name http://schemas.openid.net/event/backchannel-logout. + // 7. Verify that the Logout Token does not contain a nonce Claim. + // 8. Optionally verify that another Logout Token with the same jti value has + // not been recently received. + // 9. Optionally verify that the iss Logout Token Claim matches the iss Claim + // in an ID Token issued for the current session or a recent session of this + // RP with the OP. + // 10. Optionally verify that any sub Logout Token Claim matches the sub Claim + // in an ID Token issued for the current session or a recent session of + // this RP with the OP. + // 11. Optionally verify that any sid Logout Token Claim matches the sid Claim + // in an ID Token issued for the current session or a recent session of + // this RP with the OP. + // + // If any of the validation steps fails, reject the Logout Token and return an + // HTTP 400 Bad Request error. Otherwise, proceed to perform the logout actions. + // + // The ISS and AUD claims are already checked by the verify_signed_jwt() + // function. 
+ + // This verifies (1), (2), (3) and the iss and aud claims for (4) + let token = verify_signed_jwt( + &request.logout_token, + JwtVerificationData { + issuer: provider.issuer.as_deref(), + jwks: &jwks, + client_id: &provider.client_id, + signing_algorithm: &provider.id_token_signed_response_alg, + }, + )?; + + let (_header, mut claims) = token.into_parts(); + + let time_options = TimeOptions::new(clock.now()); + claims::EXP.extract_required_with_options(&mut claims, &time_options)?; // (4) + claims::IAT.extract_required_with_options(&mut claims, &time_options)?; // (4) + + let sub = claims::SUB.extract_optional(&mut claims)?; // (5) + let sid = claims::SID.extract_optional(&mut claims)?; // (5) + if sub.is_none() && sid.is_none() { + return Err(RouteError::NoSubOrSidClaim); + } + + EVENTS.extract_required(&mut claims)?; // (6) + claims::NONCE.assert_absent(&claims)?; // (7) + + // Find the corresponding upstream OAuth 2.0 sessions + let mut auth_session_filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + if let Some(sub) = &sub { + auth_session_filter = auth_session_filter.with_sub_claim(sub); + } + if let Some(sid) = &sid { + auth_session_filter = auth_session_filter.with_sid_claim(sid); + } + let count = repo + .upstream_oauth_session() + .count(auth_session_filter) + .await?; + + tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {count} corresponding authentication sessions"); + + match provider.on_backchannel_logout { + UpstreamOAuthProviderOnBackchannelLogout::DoNothing => { + tracing::warn!(%provider.id, "Provider configured to do nothing on backchannel logout"); + } + UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly => { + let filter = BrowserSessionFilter::new() + .linked_to_upstream_sessions_only(auth_session_filter) + .active_only(); + let affected = repo.browser_session().finish_bulk(&clock, filter).await?; + tracing::info!("Finished {affected} browser sessions"); + } + 
UpstreamOAuthProviderOnBackchannelLogout::LogoutAll => { + let browser_session_filter = + BrowserSessionFilter::new().linked_to_upstream_sessions_only(auth_session_filter); + + // We need to loop through all the browser sessions to find all the + // users affected so that we can trigger a device sync job for them + let mut cursor = Pagination::first(1000); + let mut user_ids = HashSet::new(); + loop { + let browser_sessions = repo + .browser_session() + .list(browser_session_filter, cursor) + .await?; + for edge in browser_sessions.edges { + user_ids.insert(edge.node.user.id); + cursor = cursor.after(edge.cursor); + } + + if !browser_sessions.has_next_page { + break; + } + } + + let browser_sessions_affected = repo + .browser_session() + .finish_bulk(&clock, browser_session_filter.active_only()) + .await?; + + let oauth2_session_filter = OAuth2SessionFilter::new() + .active_only() + .for_browser_sessions(browser_session_filter); + + let oauth2_sessions_affected = repo + .oauth2_session() + .finish_bulk(&clock, oauth2_session_filter) + .await?; + + let compat_session_filter = CompatSessionFilter::new() + .active_only() + .for_browser_sessions(browser_session_filter); + + let compat_sessions_affected = repo + .compat_session() + .finish_bulk(&clock, compat_session_filter) + .await?; + + tracing::info!( + "Finished {browser_sessions_affected} browser sessions, {oauth2_sessions_affected} OAuth 2.0 sessions and {compat_sessions_affected} compatibility sessions" + ); + + for user_id in user_ids { + tracing::info!(user.id = %user_id, "Queueing a device sync job for user"); + let job = SyncDevicesJob::new_for_id(user_id); + repo.queue_job().schedule_job(&mut rng, &clock, job).await?; + } + } + } + + repo.save().await?; + + Ok(()) +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cache.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cache.rs new file mode 100644 index 00000000..0857bc2c --- /dev/null +++ 
b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cache.rs @@ -0,0 +1,534 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, sync::Arc}; + +use mas_context::LogContext; +use mas_data_model::{ + UpstreamOAuthProvider, UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderPkceMode, +}; +use mas_iana::oauth::PkceCodeChallengeMethod; +use mas_oidc_client::error::DiscoveryError; +use mas_storage::{RepositoryAccess, upstream_oauth2::UpstreamOAuthProviderRepository}; +use oauth2_types::oidc::VerifiedProviderMetadata; +use tokio::sync::RwLock; +use url::Url; + +/// A high-level layer over metadata cache and provider configuration, which +/// resolves endpoint overrides and discovery modes. +pub struct LazyProviderInfos<'a> { + cache: &'a MetadataCache, + provider: &'a UpstreamOAuthProvider, + client: &'a reqwest::Client, + loaded_metadata: Option>, +} + +impl<'a> LazyProviderInfos<'a> { + pub fn new( + cache: &'a MetadataCache, + provider: &'a UpstreamOAuthProvider, + client: &'a reqwest::Client, + ) -> Self { + Self { + cache, + provider, + client, + loaded_metadata: None, + } + } + + /// Trigger the discovery process and return the metadata if discovery is + /// enabled. 
+ pub async fn maybe_discover( + &mut self, + ) -> Result, DiscoveryError> { + match self.load().await { + Ok(metadata) => Ok(Some(metadata)), + Err(DiscoveryError::Disabled) => Ok(None), + Err(e) => Err(e), + } + } + + async fn load(&mut self) -> Result<&VerifiedProviderMetadata, DiscoveryError> { + if self.loaded_metadata.is_none() { + let verify = match self.provider.discovery_mode { + UpstreamOAuthProviderDiscoveryMode::Oidc => true, + UpstreamOAuthProviderDiscoveryMode::Insecure => false, + UpstreamOAuthProviderDiscoveryMode::Disabled => { + return Err(DiscoveryError::Disabled); + } + }; + + let Some(issuer) = &self.provider.issuer else { + return Err(DiscoveryError::MissingIssuer); + }; + + let metadata = self.cache.get(self.client, issuer, verify).await?; + + self.loaded_metadata = Some(metadata); + } + + Ok(self.loaded_metadata.as_ref().unwrap()) + } + + /// Get the JWKS URI for the provider. + /// + /// Uses [`UpstreamOAuthProvider.jwks_uri_override`] if set, otherwise uses + /// the one from discovery. + pub async fn jwks_uri(&mut self) -> Result<&Url, DiscoveryError> { + if let Some(jwks_uri) = &self.provider.jwks_uri_override { + return Ok(jwks_uri); + } + + Ok(self.load().await?.jwks_uri()) + } + + /// Get the authorization endpoint for the provider. + /// + /// Uses [`UpstreamOAuthProvider.authorization_endpoint_override`] if set, + /// otherwise uses the one from discovery. + pub async fn authorization_endpoint(&mut self) -> Result<&Url, DiscoveryError> { + if let Some(authorization_endpoint) = &self.provider.authorization_endpoint_override { + return Ok(authorization_endpoint); + } + + Ok(self.load().await?.authorization_endpoint()) + } + + /// Get the token endpoint for the provider. + /// + /// Uses [`UpstreamOAuthProvider.token_endpoint_override`] if set, otherwise + /// uses the one from discovery. 
+ pub async fn token_endpoint(&mut self) -> Result<&Url, DiscoveryError> { + if let Some(token_endpoint) = &self.provider.token_endpoint_override { + return Ok(token_endpoint); + } + + Ok(self.load().await?.token_endpoint()) + } + + /// Get the userinfo endpoint for the provider. + /// + /// Uses [`UpstreamOAuthProvider.userinfo_endpoint_override`] if set, + /// otherwise uses the one from discovery. + pub async fn userinfo_endpoint(&mut self) -> Result<&Url, DiscoveryError> { + if let Some(userinfo_endpoint) = &self.provider.userinfo_endpoint_override { + return Ok(userinfo_endpoint); + } + + Ok(self.load().await?.userinfo_endpoint()) + } + + /// Get the PKCE methods supported by the provider. + /// + /// If the mode is set to auto, it will use the ones from discovery, + /// defaulting to none if discovery is disabled. + pub async fn pkce_methods( + &mut self, + ) -> Result>, DiscoveryError> { + let methods = match self.provider.pkce_mode { + UpstreamOAuthProviderPkceMode::Auto => self + .maybe_discover() + .await? + .and_then(|metadata| metadata.code_challenge_methods_supported.clone()), + UpstreamOAuthProviderPkceMode::S256 => Some(vec![PkceCodeChallengeMethod::S256]), + UpstreamOAuthProviderPkceMode::Disabled => None, + }; + + Ok(methods) + } +} + +/// A simple OIDC metadata cache +/// +/// It never evicts entries, does not cache failures and has no locking. +/// It can also be refreshed in the background, and warmed up on startup. +/// It is good enough for our use case. +#[allow(clippy::module_name_repetitions)] +#[derive(Debug, Clone, Default)] +pub struct MetadataCache { + cache: Arc>>>, + insecure_cache: Arc>>>, +} + +impl MetadataCache { + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Warm up the cache by fetching all the known providers from the database + /// and inserting them into the cache. + /// + /// This spawns a background task that will refresh the cache at the given + /// interval. 
+ /// + /// # Errors + /// + /// Returns an error if the warm up task could not be started. + #[tracing::instrument(name = "metadata_cache.warm_up_and_run", skip_all)] + pub async fn warm_up_and_run( + &self, + client: &reqwest::Client, + interval: std::time::Duration, + repository: &mut R, + ) -> Result, R::Error> { + let providers = repository.upstream_oauth_provider().all_enabled().await?; + + for provider in providers { + let verify = match provider.discovery_mode { + UpstreamOAuthProviderDiscoveryMode::Oidc => true, + UpstreamOAuthProviderDiscoveryMode::Insecure => false, + UpstreamOAuthProviderDiscoveryMode::Disabled => continue, + }; + + let Some(issuer) = &provider.issuer else { + tracing::error!(%provider.id, "Provider doesn't have an issuer set, but discovery is enabled!"); + continue; + }; + + if let Err(e) = self.fetch(client, issuer, verify).await { + tracing::error!(%issuer, error = &e as &dyn std::error::Error, "Failed to fetch provider metadata"); + } + } + + // Spawn a background task to refresh the cache regularly + let cache = self.clone(); + let client = client.clone(); + Ok(tokio::spawn(async move { + loop { + // Re-fetch the known metadata at the given interval + tokio::time::sleep(interval).await; + LogContext::new("metadata-cache-refresh") + .run(|| cache.refresh_all(&client)) + .await; + } + })) + } + + #[tracing::instrument(name = "metadata_cache.fetch", fields(%issuer), skip_all)] + async fn fetch( + &self, + client: &reqwest::Client, + issuer: &str, + verify: bool, + ) -> Result, DiscoveryError> { + if verify { + let metadata = mas_oidc_client::requests::discovery::discover(client, issuer).await?; + let metadata = Arc::new(metadata); + + self.cache + .write() + .await + .insert(issuer.to_owned(), metadata.clone()); + + Ok(metadata) + } else { + let metadata = + mas_oidc_client::requests::discovery::insecure_discover(client, issuer).await?; + let metadata = Arc::new(metadata); + + self.insecure_cache + .write() + .await + 
.insert(issuer.to_owned(), metadata.clone()); + + Ok(metadata) + } + } + + /// Get the metadata for the given issuer. + /// + /// # Errors + /// + /// Returns an error if the metadata could not be retrieved. + #[tracing::instrument(name = "metadata_cache.get", fields(%issuer), skip_all)] + pub async fn get( + &self, + client: &reqwest::Client, + issuer: &str, + verify: bool, + ) -> Result, DiscoveryError> { + let cache = if verify { + self.cache.read().await + } else { + self.insecure_cache.read().await + }; + + if let Some(metadata) = cache.get(issuer) { + return Ok(Arc::clone(metadata)); + } + // Drop the cache guard so that we don't deadlock when we try to fetch + drop(cache); + + let metadata = self.fetch(client, issuer, verify).await?; + Ok(metadata) + } + + #[tracing::instrument(name = "metadata_cache.refresh_all", skip_all)] + async fn refresh_all(&self, client: &reqwest::Client) { + // Grab all the keys first to avoid locking the cache for too long + let keys: Vec = { + let cache = self.cache.read().await; + cache.keys().cloned().collect() + }; + + for issuer in keys { + if let Err(e) = self.fetch(client, &issuer, true).await { + tracing::error!(issuer = %issuer, error = &e as &dyn std::error::Error, "Failed to refresh provider metadata"); + } + } + + // Do the same for the insecure cache + let keys: Vec = { + let cache = self.insecure_cache.read().await; + cache.keys().cloned().collect() + }; + + for issuer in keys { + if let Err(e) = self.fetch(client, &issuer, false).await { + tracing::error!(issuer = %issuer, error = &e as &dyn std::error::Error, "Failed to refresh provider metadata"); + } + } + } +} + +#[cfg(test)] +mod tests { + // XXX: sadly, we can't test HTTPS requests with wiremock, so we can only test + // 'insecure' discovery + + use mas_data_model::{ + Clock, UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, clock::MockClock, + }; + use mas_iana::jose::JsonWebSignatureAlg; + 
use oauth2_types::scope::{OPENID, Scope}; + use ulid::Ulid; + use wiremock::{ + Mock, MockServer, ResponseTemplate, + matchers::{method, path}, + }; + + use super::*; + use crate::test_utils::setup; + + #[tokio::test] + async fn test_metadata_cache() { + setup(); + let mock_server = MockServer::start().await; + let http_client = mas_http::reqwest_client(); + + let cache = MetadataCache::new(); + + // An inexistant issuer should fail + cache + .get(&http_client, &mock_server.uri(), false) + .await + .unwrap_err(); + + let expected_calls = 3; + let mut calls = 0; + let _mock_guard = Mock::given(method("GET")) + .and(path("/.well-known/openid-configuration")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "issuer": mock_server.uri(), + "authorization_endpoint": "https://example.com/authorize", + "token_endpoint": "https://example.com/token", + "jwks_uri": "https://example.com/jwks", + "userinfo_endpoint": "https://example.com/userinfo", + "scopes_supported": ["openid"], + "response_types_supported": ["code"], + "response_modes_supported": ["query", "fragment"], + "grant_types_supported": ["authorization_code"], + "subject_types_supported": ["public"], + "id_token_signing_alg_values_supported": ["RS256"], + }))) + .expect(expected_calls) + .mount(&mock_server) + .await; + + // A valid issuer should succeed + cache + .get(&http_client, &mock_server.uri(), false) + .await + .unwrap(); + calls += 1; + + // Calling again should not trigger a new fetch + cache + .get(&http_client, &mock_server.uri(), false) + .await + .unwrap(); + calls += 0; + + // A secure discovery should call but fail because the issuer is insecure + cache + .get(&http_client, &mock_server.uri(), true) + .await + .unwrap_err(); + calls += 1; + + // Calling refresh should refresh all the known issuers + cache.refresh_all(&http_client).await; + calls += 1; + + assert_eq!(calls, expected_calls); + } + + #[tokio::test] + async fn test_lazy_provider_infos() { + setup(); + + 
let mock_server = MockServer::start().await; + let http_client = mas_http::reqwest_client(); + + let expected_calls = 2; + let mut calls = 0; + let _mock_guard = Mock::given(method("GET")) + .and(path("/.well-known/openid-configuration")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "issuer": mock_server.uri(), + "authorization_endpoint": "https://example.com/authorize", + "token_endpoint": "https://example.com/token", + "jwks_uri": "https://example.com/jwks", + "userinfo_endpoint": "https://example.com/userinfo", + "scopes_supported": ["openid"], + "response_types_supported": ["code"], + "response_modes_supported": ["query", "fragment"], + "grant_types_supported": ["authorization_code"], + "subject_types_supported": ["public"], + "id_token_signing_alg_values_supported": ["RS256"], + }))) + .expect(expected_calls) + .mount(&mock_server) + .await; + + let clock = MockClock::default(); + let provider = UpstreamOAuthProvider { + id: Ulid::nil(), + issuer: Some(mock_server.uri()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Insecure, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + authorization_endpoint_override: None, + scope: Scope::from_iter([OPENID]), + userinfo_endpoint_override: None, + token_endpoint_override: None, + client_id: "client_id".to_owned(), + encrypted_client_secret: None, + token_endpoint_signing_alg: None, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + response_mode: None, + created_at: clock.now(), + disabled_at: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }; + + // Without 
any override, it should just use discovery + { + let cache = MetadataCache::new(); + let mut lazy_metadata = LazyProviderInfos::new(&cache, &provider, &http_client); + lazy_metadata.maybe_discover().await.unwrap(); + assert_eq!( + lazy_metadata + .authorization_endpoint() + .await + .unwrap() + .as_str(), + "https://example.com/authorize" + ); + calls += 1; + } + + // Test overriding endpoints + { + let provider = UpstreamOAuthProvider { + jwks_uri_override: Some("https://example.com/jwks_override".parse().unwrap()), + authorization_endpoint_override: Some( + "https://example.com/authorize_override".parse().unwrap(), + ), + token_endpoint_override: Some( + "https://example.com/token_override".parse().unwrap(), + ), + ..provider.clone() + }; + let cache = MetadataCache::new(); + let mut lazy_metadata = LazyProviderInfos::new(&cache, &provider, &http_client); + assert_eq!( + lazy_metadata.jwks_uri().await.unwrap().as_str(), + "https://example.com/jwks_override" + ); + assert_eq!( + lazy_metadata + .authorization_endpoint() + .await + .unwrap() + .as_str(), + "https://example.com/authorize_override" + ); + assert_eq!( + lazy_metadata.token_endpoint().await.unwrap().as_str(), + "https://example.com/token_override" + ); + // This shouldn't trigger a new fetch as the endpoint is overriden + calls += 0; + } + + // Loading an insecure provider with secure discovery should fail + { + let provider = UpstreamOAuthProvider { + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + ..provider.clone() + }; + let cache = MetadataCache::new(); + let mut lazy_metadata = LazyProviderInfos::new(&cache, &provider, &http_client); + lazy_metadata.authorization_endpoint().await.unwrap_err(); + // This triggered a fetch, even though it failed + calls += 1; + } + + // Getting endpoints when discovery is disabled only works for overriden ones + { + let provider = UpstreamOAuthProvider { + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Disabled, + authorization_endpoint_override: 
Some( + Url::parse("https://example.com/authorize_override").unwrap(), + ), + token_endpoint_override: None, + ..provider.clone() + }; + let cache = MetadataCache::new(); + let mut lazy_metadata = LazyProviderInfos::new(&cache, &provider, &http_client); + // This should not fail, but also does nothing + assert!(lazy_metadata.maybe_discover().await.unwrap().is_none()); + assert_eq!( + lazy_metadata + .authorization_endpoint() + .await + .unwrap() + .as_str(), + "https://example.com/authorize_override" + ); + assert!(matches!( + lazy_metadata.token_endpoint().await, + Err(DiscoveryError::Disabled), + )); + // This did not trigger a fetch + calls += 0; + } + + assert_eq!(calls, expected_calls); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/callback.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/callback.rs new file mode 100644 index 00000000..7fdaf13a --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/callback.rs @@ -0,0 +1,501 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::LazyLock; + +use axum::{ + Form, + extract::{Path, State}, + http::Method, + response::{Html, IntoResponse, Response}, +}; +use hyper::StatusCode; +use mas_axum_utils::{GenericError, InternalError, cookies::CookieJar}; +use mas_data_model::{ + BoxClock, BoxRng, Clock, UpstreamOAuthProvider, UpstreamOAuthProviderResponseMode, +}; +use mas_jose::claims::TokenHash; +use mas_keystore::{Encrypter, Keystore}; +use mas_oidc_client::requests::jose::JwtVerificationData; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, + upstream_oauth2::{ + UpstreamOAuthLinkRepository, UpstreamOAuthProviderRepository, + UpstreamOAuthSessionRepository, + }, +}; +use mas_templates::{FormPostContext, Templates}; +use oauth2_types::{errors::ClientErrorCode, requests::AccessTokenRequest}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use thiserror::Error; +use ulid::Ulid; + +use super::{ + UpstreamSessionsCookie, + cache::LazyProviderInfos, + client_credentials_for_provider, + template::{AttributeMappingContext, environment}, +}; +use crate::{ + METER, PreferredLanguage, impl_from_error_for_route, upstream_oauth2::cache::MetadataCache, +}; + +static CALLBACK_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.upstream_oauth2.callback") + .with_description("Number of requests to the upstream OAuth2 callback endpoint") + .build() +}); +const PROVIDER: Key = Key::from_static_str("provider"); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Serialize, Deserialize)] +pub struct Params { + #[serde(skip_serializing_if = "Option::is_none")] + state: Option, + + /// An extra parameter to track whether the POST request was re-made by us + /// to the same URL to escape Same-Site cookies restrictions + #[serde(default)] + did_mas_repost_to_itself: bool, + + #[serde(skip_serializing_if = "Option::is_none")] + code: Option, + + #[serde(skip_serializing_if = 
"Option::is_none")] + error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + error_description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + error_uri: Option, + + #[serde(flatten)] + extra_callback_parameters: Option, +} + +impl Params { + /// Returns true if none of the fields are set + pub fn is_empty(&self) -> bool { + self.state.is_none() + && self.code.is_none() + && self.error.is_none() + && self.error_description.is_none() + && self.error_uri.is_none() + } +} + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + #[error("Session not found")] + SessionNotFound, + + #[error("Provider not found")] + ProviderNotFound, + + #[error("Provider mismatch")] + ProviderMismatch, + + #[error("Session already completed")] + AlreadyCompleted, + + #[error("State parameter mismatch")] + StateMismatch, + + #[error("Missing state parameter")] + MissingState, + + #[error("Missing code parameter")] + MissingCode, + + #[error("Could not extract subject from ID token")] + ExtractSubject(#[source] minijinja::Error), + + #[error("Subject is empty")] + EmptySubject, + + #[error("Error from the provider: {error}")] + ClientError { + error: ClientErrorCode, + error_description: Option, + }, + + #[error("Missing session cookie")] + MissingCookie, + + #[error("Missing query parameters")] + MissingQueryParams, + + #[error("Missing form parameters")] + MissingFormParams, + + #[error("Invalid response mode, expected '{expected}'")] + InvalidResponseMode { + expected: UpstreamOAuthProviderResponseMode, + }, + + #[error(transparent)] + Internal(Box), +} + +impl_from_error_for_route!(mas_templates::TemplateError); +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_oidc_client::error::DiscoveryError); +impl_from_error_for_route!(mas_oidc_client::error::JwksError); +impl_from_error_for_route!(mas_oidc_client::error::TokenRequestError); +impl_from_error_for_route!(mas_oidc_client::error::IdTokenError); 
+impl_from_error_for_route!(mas_oidc_client::error::UserInfoError); +impl_from_error_for_route!(super::ProviderCredentialsError); +impl_from_error_for_route!(super::cookie::UpstreamSessionNotFound); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + match self { + Self::Internal(e) => InternalError::new(e).into_response(), + e @ (Self::ProviderNotFound | Self::SessionNotFound) => { + GenericError::new(StatusCode::NOT_FOUND, e).into_response() + } + e => GenericError::new(StatusCode::BAD_REQUEST, e).into_response(), + } + } +} + +#[tracing::instrument( + name = "handlers.upstream_oauth2.callback.handler", + fields(upstream_oauth_provider.id = %provider_id), + skip_all, +)] +#[allow(clippy::too_many_arguments)] +pub(crate) async fn handler( + mut rng: BoxRng, + clock: BoxClock, + State(metadata_cache): State, + mut repo: BoxRepository, + State(url_builder): State, + State(encrypter): State, + State(keystore): State, + State(client): State, + State(templates): State, + method: Method, + PreferredLanguage(locale): PreferredLanguage, + cookie_jar: CookieJar, + Path(provider_id): Path, + Form(params): Form, +) -> Result { + let provider = repo + .upstream_oauth_provider() + .lookup(provider_id) + .await? + .filter(UpstreamOAuthProvider::enabled) + .ok_or(RouteError::ProviderNotFound)?; + + let sessions_cookie = UpstreamSessionsCookie::load(&cookie_jar); + + if params.is_empty() { + if let Method::GET = method { + return Err(RouteError::MissingQueryParams); + } + + return Err(RouteError::MissingFormParams); + } + + // The `Form` extractor will use the body of the request for POST requests and + // the query parameters for GET requests. 
We need to then look at the method do + // make sure it matches the expected `response_mode` + match (provider.response_mode, method) { + (Some(UpstreamOAuthProviderResponseMode::FormPost) | None, Method::POST) => { + // We set the cookies with a `Same-Site` policy set to `Lax`, so because this is + // usually a cross-site form POST, we need to render a form with the + // same values, which posts back to the same URL. However, there are + // other valid reasons for the cookie to be missing, so to track whether we did + // this POST ourselves, we set a flag. + if sessions_cookie.is_empty() && !params.did_mas_repost_to_itself { + let params = Params { + did_mas_repost_to_itself: true, + ..params + }; + let context = FormPostContext::new_for_current_url(params).with_language(&locale); + let html = templates.render_form_post(&context)?; + return Ok(Html(html).into_response()); + } + } + (None, _) | (Some(UpstreamOAuthProviderResponseMode::Query), Method::GET) => {} + (Some(expected), _) => return Err(RouteError::InvalidResponseMode { expected }), + } + + if let Some(error) = params.error { + CALLBACK_COUNTER.add( + 1, + &[ + KeyValue::new(PROVIDER, provider_id.to_string()), + KeyValue::new(RESULT, "error"), + ], + ); + + return Err(RouteError::ClientError { + error, + error_description: params.error_description.clone(), + }); + } + + let Some(state) = params.state else { + return Err(RouteError::MissingState); + }; + + let (session_id, _post_auth_action) = sessions_cookie + .find_session(provider_id, &state) + .map_err(|_| RouteError::MissingCookie)?; + + let session = repo + .upstream_oauth_session() + .lookup(session_id) + .await? 
+ .ok_or(RouteError::SessionNotFound)?; + + if provider.id != session.provider_id { + // The provider in the session cookie should match the one from the URL + return Err(RouteError::ProviderMismatch); + } + + if state != session.state_str { + // The state in the session cookie should match the one from the params + return Err(RouteError::StateMismatch); + } + + if !session.is_pending() { + // The session was already completed + return Err(RouteError::AlreadyCompleted); + } + + // Let's extract the code from the params, and return if there was an error + let Some(code) = params.code else { + return Err(RouteError::MissingCode); + }; + + CALLBACK_COUNTER.add( + 1, + &[ + KeyValue::new(PROVIDER, provider_id.to_string()), + KeyValue::new(RESULT, "success"), + ], + ); + + let mut lazy_metadata = LazyProviderInfos::new(&metadata_cache, &provider, &client); + + // Figure out the client credentials + let client_credentials = client_credentials_for_provider( + &provider, + lazy_metadata.token_endpoint().await?, + &keystore, + &encrypter, + )?; + + let redirect_uri = url_builder.upstream_oauth_callback(provider.id); + + let token_response = mas_oidc_client::requests::token::request_access_token( + &client, + client_credentials, + lazy_metadata.token_endpoint().await?, + AccessTokenRequest::AuthorizationCode(oauth2_types::requests::AuthorizationCodeGrant { + code: code.clone(), + redirect_uri: Some(redirect_uri), + code_verifier: session.code_challenge_verifier.clone(), + }), + clock.now(), + &mut rng, + ) + .await?; + + let mut jwks = None; + let mut id_token_claims = None; + + let mut context = AttributeMappingContext::new(); + if let Some(id_token) = token_response.id_token.as_ref() { + jwks = Some( + mas_oidc_client::requests::jose::fetch_jwks(&client, lazy_metadata.jwks_uri().await?) 
+ .await?, + ); + + let id_token_verification_data = JwtVerificationData { + issuer: provider.issuer.as_deref(), + jwks: jwks.as_ref().unwrap(), + signing_algorithm: &provider.id_token_signed_response_alg, + client_id: &provider.client_id, + }; + + // Decode and verify the ID token + let id_token = mas_oidc_client::requests::jose::verify_id_token( + id_token, + id_token_verification_data, + None, + clock.now(), + )?; + + let (_headers, mut claims) = id_token.into_parts(); + + // Save a copy of the claims for later; the claims extract methods + // remove them from the map, and we want to store the original claims. + // We anyway need this to be a serde_json::Value + id_token_claims = Some( + serde_json::to_value(&claims) + .expect("serializing a HashMap into a Value should never fail"), + ); + + // Access token hash must match. + mas_jose::claims::AT_HASH + .extract_optional_with_options( + &mut claims, + TokenHash::new( + id_token_verification_data.signing_algorithm, + &token_response.access_token, + ), + ) + .map_err(mas_oidc_client::error::IdTokenError::from)?; + + // Code hash must match. + mas_jose::claims::C_HASH + .extract_optional_with_options( + &mut claims, + TokenHash::new(id_token_verification_data.signing_algorithm, &code), + ) + .map_err(mas_oidc_client::error::IdTokenError::from)?; + + // Nonce must match if present. 
+ if let Some(nonce) = session.nonce.as_deref() { + mas_jose::claims::NONCE + .extract_required_with_options(&mut claims, nonce) + .map_err(mas_oidc_client::error::IdTokenError::from)?; + } + + context = context.with_id_token_claims(claims); + } + + if let Some(extra_callback_parameters) = params.extra_callback_parameters.clone() { + context = context.with_extra_callback_parameters(extra_callback_parameters); + } + + let userinfo = if provider.fetch_userinfo { + Some(json!(match &provider.userinfo_signed_response_alg { + Some(signing_algorithm) => { + let jwks = match jwks { + Some(jwks) => jwks, + None => { + mas_oidc_client::requests::jose::fetch_jwks( + &client, + lazy_metadata.jwks_uri().await?, + ) + .await? + } + }; + + mas_oidc_client::requests::userinfo::fetch_userinfo( + &client, + lazy_metadata.userinfo_endpoint().await?, + token_response.access_token.as_str(), + Some(JwtVerificationData { + issuer: provider.issuer.as_deref(), + jwks: &jwks, + signing_algorithm, + client_id: &provider.client_id, + }), + ) + .await? + } + None => { + mas_oidc_client::requests::userinfo::fetch_userinfo( + &client, + lazy_metadata.userinfo_endpoint().await?, + token_response.access_token.as_str(), + None, + ) + .await? 
+ } + })) + } else { + None + }; + + if let Some(userinfo) = userinfo.clone() { + context = context.with_userinfo_claims(userinfo); + } + + let context = context.build(); + + let env = environment(); + + let template = provider + .claims_imports + .subject + .template + .as_deref() + .unwrap_or("{{ user.sub }}"); + let subject = env + .render_str(template, context.clone()) + .map_err(RouteError::ExtractSubject)?; + + if subject.is_empty() { + return Err(RouteError::EmptySubject); + } + + // Look for an existing link + let maybe_link = repo + .upstream_oauth_link() + .find_by_subject(&provider, &subject) + .await?; + + let link = if let Some(link) = maybe_link { + link + } else { + // Try to render the human account name if we have one, + // but just log if it fails + let human_account_name = provider + .claims_imports + .account_name + .template + .as_deref() + .and_then(|template| match env.render_str(template, context) { + Ok(name) => Some(name), + Err(e) => { + tracing::warn!( + error = &e as &dyn std::error::Error, + "Failed to render account name" + ); + None + } + }); + + repo.upstream_oauth_link() + .add(&mut rng, &clock, &provider, subject, human_account_name) + .await? + }; + + let session = repo + .upstream_oauth_session() + .complete_with_link( + &clock, + session, + &link, + token_response.id_token, + id_token_claims, + params.extra_callback_parameters, + userinfo, + ) + .await?; + + let cookie_jar = sessions_cookie + .add_link_to_session(session.id, link.id)? 
+ .save(cookie_jar, &clock); + + repo.save().await?; + + Ok(( + cookie_jar, + url_builder.redirect(&mas_router::UpstreamOAuth2Link::new(link.id)), + ) + .into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cookie.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cookie.rs new file mode 100644 index 00000000..1946b235 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/cookie.rs @@ -0,0 +1,225 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// TODO: move that to a standalone cookie manager + +use chrono::{DateTime, Duration, Utc}; +use mas_axum_utils::cookies::CookieJar; +use mas_data_model::Clock; +use mas_router::PostAuthAction; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +/// Name of the cookie +static COOKIE_NAME: &str = "upstream-oauth2-sessions"; + +/// Sessions expire after 10 minutes +static SESSION_MAX_TIME: Duration = Duration::microseconds(10 * 60 * 1000 * 1000); + +#[derive(Serialize, Deserialize, Debug)] +pub struct Payload { + session: Ulid, + provider: Ulid, + state: String, + link: Option, + post_auth_action: Option, +} + +impl Payload { + fn expired(&self, now: DateTime) -> bool { + let Ok(ts) = self.session.timestamp_ms().try_into() else { + return true; + }; + let Some(when) = DateTime::from_timestamp_millis(ts) else { + return true; + }; + now - when > SESSION_MAX_TIME + } +} + +#[derive(Serialize, Deserialize, Default, Debug)] +pub struct UpstreamSessions(Vec); + +#[derive(Debug, Error, PartialEq, Eq)] +#[error("upstream session not found")] +pub struct UpstreamSessionNotFound; + +impl UpstreamSessions { + /// Load the upstreams sessions cookie + pub fn load(cookie_jar: &CookieJar) -> Self { + match 
cookie_jar.load(COOKIE_NAME) { + Ok(Some(sessions)) => sessions, + Ok(None) => Self::default(), + Err(e) => { + tracing::warn!("Invalid upstream sessions cookie: {}", e); + Self::default() + } + } + } + + /// Returns true if the cookie is empty + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Save the upstreams sessions to the cookie jar + pub fn save(self, cookie_jar: CookieJar, clock: &C) -> CookieJar + where + C: Clock, + { + let this = self.expire(clock.now()); + cookie_jar.save(COOKIE_NAME, &this, false) + } + + fn expire(mut self, now: DateTime) -> Self { + self.0.retain(|p| !p.expired(now)); + self + } + + /// Add a new session, for a provider and a random state + pub fn add( + mut self, + session: Ulid, + provider: Ulid, + state: String, + post_auth_action: Option, + ) -> Self { + self.0.push(Payload { + session, + provider, + state, + link: None, + post_auth_action, + }); + self + } + + // Find a session ID from the provider and the state + pub fn find_session( + &self, + provider: Ulid, + state: &str, + ) -> Result<(Ulid, Option<&PostAuthAction>), UpstreamSessionNotFound> { + self.0 + .iter() + .find(|p| p.provider == provider && p.state == state && p.link.is_none()) + .map(|p| (p.session, p.post_auth_action.as_ref())) + .ok_or(UpstreamSessionNotFound) + } + + /// Save the link generated by a session + pub fn add_link_to_session( + mut self, + session: Ulid, + link: Ulid, + ) -> Result { + let payload = self + .0 + .iter_mut() + .find(|p| p.session == session && p.link.is_none()) + .ok_or(UpstreamSessionNotFound)?; + + payload.link = Some(link); + Ok(self) + } + + /// Find a session from its link + pub fn lookup_link( + &self, + link_id: Ulid, + ) -> Result<(Ulid, Option<&PostAuthAction>), UpstreamSessionNotFound> { + self.0 + .iter() + .filter(|p| p.link == Some(link_id)) + // Find the session with the highest ID, aka. 
the most recent one + .reduce(|a, b| if a.session > b.session { a } else { b }) + .map(|p| (p.session, p.post_auth_action.as_ref())) + .ok_or(UpstreamSessionNotFound) + } + + /// Mark a link as consumed to avoid replay + pub fn consume_link(mut self, link_id: Ulid) -> Result { + let pos = self + .0 + .iter() + .position(|p| p.link == Some(link_id)) + .ok_or(UpstreamSessionNotFound)?; + + self.0.remove(pos); + + Ok(self) + } +} + +#[cfg(test)] +mod tests { + use chrono::TimeZone; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + + use super::*; + + #[test] + fn test_session_cookie() { + let now = chrono::Utc + .with_ymd_and_hms(2018, 1, 18, 1, 30, 22) + .unwrap(); + let mut rng = ChaChaRng::seed_from_u64(42); + + let sessions = UpstreamSessions::default(); + + let provider_a = Ulid::from_datetime_with_source(now.into(), &mut rng); + let provider_b = Ulid::from_datetime_with_source(now.into(), &mut rng); + + let first_session = Ulid::from_datetime_with_source(now.into(), &mut rng); + let first_state = "first-state"; + let sessions = sessions.add(first_session, provider_a, first_state.into(), None); + + let now = now + Duration::microseconds(5 * 60 * 1000 * 1000); + + let second_session = Ulid::from_datetime_with_source(now.into(), &mut rng); + let second_state = "second-state"; + let sessions = sessions.add(second_session, provider_b, second_state.into(), None); + + let sessions = sessions.expire(now); + assert_eq!( + sessions.find_session(provider_a, first_state).unwrap().0, + first_session, + ); + assert_eq!( + sessions.find_session(provider_b, second_state).unwrap().0, + second_session + ); + assert!(sessions.find_session(provider_b, first_state).is_err()); + assert!(sessions.find_session(provider_a, second_state).is_err()); + + // Make the first session expire + let now = now + Duration::microseconds(6 * 60 * 1000 * 1000); + let sessions = sessions.expire(now); + assert!(sessions.find_session(provider_a, first_state).is_err()); + assert_eq!( + 
sessions.find_session(provider_b, second_state).unwrap().0, + second_session + ); + + // Associate a link with the second + let second_link = Ulid::from_datetime_with_source(now.into(), &mut rng); + let sessions = sessions + .add_link_to_session(second_session, second_link) + .unwrap(); + + // Now the session can't be found with its state + assert!(sessions.find_session(provider_b, second_state).is_err()); + + // But it can be looked up by its link + assert_eq!(sessions.lookup_link(second_link).unwrap().0, second_session); + // And it can be consumed + let sessions = sessions.consume_link(second_link).unwrap(); + // But only once + assert!(sessions.consume_link(second_link).is_err()); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/link.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/link.rs new file mode 100644 index 00000000..357a7a72 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/link.rs @@ -0,0 +1,2348 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{ + net::IpAddr, + sync::{Arc, LazyLock}, +}; + +use axum::{ + Form, + extract::{Path, State}, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::typed_header::TypedHeader; +use hyper::StatusCode; +use mas_axum_utils::{ + GenericError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, + record_error, +}; +use mas_data_model::{ + BoxClock, BoxRng, UpstreamOAuthAuthorizationSession, UpstreamOAuthProviderOnConflict, + UserRegistration, +}; +use mas_jose::jwt::Jwt; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, Pagination, RepositoryAccess, + upstream_oauth2::{ + UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository, UpstreamOAuthSessionRepository, + }, + user::{BrowserSessionRepository, UserEmailRepository, UserRepository}, +}; +use mas_templates::{ + AccountInactiveContext, ErrorContext, FieldError, FormError, TemplateContext, Templates, + ToFormState, UpstreamExistingLinkContext, UpstreamRegister, UpstreamSuggestLink, +}; +use minijinja::Environment; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +use super::{ + UpstreamSessionsCookie, + template::{AttributeMappingContext, environment}, +}; +use crate::{ + BoundActivityTracker, METER, PreferredLanguage, SiteConfig, impl_from_error_for_route, + views::{register::UserRegistrationSessionsCookie, shared::OptionalPostAuthAction}, +}; + +static LOGIN_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.upstream_oauth2.login") + .with_description("Successful upstream OAuth 2.0 login to existing accounts") + .with_unit("{login}") + .build() +}); +static REGISTRATION_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.upstream_oauth2.registration") + .with_description("Successful upstream OAuth 2.0 registration") + .with_unit("{registration}") + .build() +}); +const PROVIDER: Key 
= Key::from_static_str("provider"); + +const DEFAULT_LOCALPART_TEMPLATE: &str = "{{ user.preferred_username }}"; +const DEFAULT_DISPLAYNAME_TEMPLATE: &str = "{{ user.name }}"; +const DEFAULT_EMAIL_TEMPLATE: &str = "{{ user.email }}"; + +#[derive(Debug, Error)] +pub(crate) enum RouteError { + /// Couldn't find the link specified in the URL + #[error("Link not found")] + LinkNotFound, + + /// Couldn't find the session on the link + #[error("Session {0} not found")] + SessionNotFound(Ulid), + + /// Couldn't find the user + #[error("User {0} not found")] + UserNotFound(Ulid), + + /// Couldn't find upstream provider + #[error("Upstream provider {0} not found")] + ProviderNotFound(Ulid), + + /// Required attribute rendered to an empty string + #[error("Template {template:?} rendered to an empty string")] + RequiredAttributeEmpty { template: String }, + + /// Required claim was missing in `id_token` + #[error( + "Template {template:?} could not be rendered from the upstream provider's response for required claim" + )] + RequiredAttributeRender { + template: String, + + #[source] + source: minijinja::Error, + }, + + /// Session was already consumed + #[error("Session {0} already consumed")] + SessionConsumed(Ulid), + + #[error("Missing session cookie")] + MissingCookie, + + #[error("Invalid form action")] + InvalidFormAction, + + #[error("Homeserver connection error")] + HomeserverConnection(#[source] anyhow::Error), + + #[error(transparent)] + Internal(Box), +} + +impl_from_error_for_route!(mas_templates::TemplateError); +impl_from_error_for_route!(mas_axum_utils::csrf::CsrfError); +impl_from_error_for_route!(super::cookie::UpstreamSessionNotFound); +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_policy::EvaluationError); +impl_from_error_for_route!(mas_jose::jwt::JwtDecodeError); + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!( + self, + 
Self::Internal(_) + | Self::RequiredAttributeEmpty { .. } + | Self::RequiredAttributeRender { .. } + | Self::SessionNotFound(_) + | Self::ProviderNotFound(_) + | Self::UserNotFound(_) + | Self::HomeserverConnection(_) + ); + + let status_code = match self { + Self::LinkNotFound => StatusCode::NOT_FOUND, + _ => StatusCode::INTERNAL_SERVER_ERROR, + }; + + let response = GenericError::new(status_code, self); + (sentry_event_id, response).into_response() + } +} + +/// Utility function to render an attribute template. +/// +/// # Parameters +/// +/// * `environment` - The minijinja environment to use to render the template +/// * `template` - The template to use to render the claim +/// * `required` - Whether the attribute is required or not +/// +/// # Errors +/// +/// Returns an error if the attribute is required but fails to render or is +/// empty +fn render_attribute_template( + environment: &Environment, + template: &str, + context: &minijinja::Value, + required: bool, +) -> Result, RouteError> { + match environment.render_str(template, context) { + Ok(value) if value.is_empty() => { + if required { + return Err(RouteError::RequiredAttributeEmpty { + template: template.to_owned(), + }); + } + + Ok(None) + } + + Ok(value) => Ok(Some(value)), + + Err(source) => { + if required { + return Err(RouteError::RequiredAttributeRender { + template: template.to_owned(), + source, + }); + } + + tracing::warn!(error = &source as &dyn std::error::Error, %template, "Error while rendering template"); + Ok(None) + } + } +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "lowercase", tag = "action")] +pub(crate) enum FormData { + Register { + #[serde(default)] + username: Option, + #[serde(default)] + import_email: Option, + #[serde(default)] + import_display_name: Option, + #[serde(default)] + accept_terms: Option, + }, + Link, +} + +impl ToFormState for FormData { + type Field = mas_templates::UpstreamRegisterFormField; +} + +#[tracing::instrument( + name = 
"handlers.upstream_oauth2.link.get", + fields(upstream_oauth_link.id = %link_id), + skip_all, +)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + mut policy: Policy, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(homeserver): State>, + cookie_jar: CookieJar, + activity_tracker: BoundActivityTracker, + user_agent: Option>, + Path(link_id): Path, +) -> Result { + let user_agent = user_agent.map(|ua| ua.as_str().to_owned()); + let sessions_cookie = UpstreamSessionsCookie::load(&cookie_jar); + let (session_id, post_auth_action) = sessions_cookie + .lookup_link(link_id) + .map_err(|_| RouteError::MissingCookie)?; + + let link = repo + .upstream_oauth_link() + .lookup(link_id) + .await? + .ok_or(RouteError::LinkNotFound)?; + + let upstream_session = repo + .upstream_oauth_session() + .lookup(session_id) + .await? + .ok_or(RouteError::SessionNotFound(session_id))?; + + // This checks that we're in a browser session which is allowed to consume this + // link: the upstream auth session should have been started in this browser. + if upstream_session.link_id() != Some(link.id) { + return Err(RouteError::SessionNotFound(session_id)); + } + + if upstream_session.is_consumed() { + return Err(RouteError::SessionConsumed(session_id)); + } + + let (user_session_info, cookie_jar) = cookie_jar.session_info(); + let (csrf_token, mut cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let maybe_user_session = user_session_info.load_active_session(&mut repo).await?; + + let response = match (maybe_user_session, link.user_id) { + (Some(session), Some(user_id)) if session.user.id == user_id => { + // Session already linked, and link matches the currently logged + // user. Mark the session as consumed and renew the authentication. 
+ let upstream_session = repo + .upstream_oauth_session() + .consume(&clock, upstream_session, &session) + .await?; + + repo.browser_session() + .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_session) + .await?; + + cookie_jar = cookie_jar.set_session(&session); + + repo.save().await?; + + let post_auth_action = OptionalPostAuthAction { + post_auth_action: post_auth_action.cloned(), + }; + + post_auth_action.go_next(&url_builder).into_response() + } + + (Some(user_session), Some(user_id)) => { + // Session already linked, but link doesn't match the currently + // logged user. Suggest logging out of the current user + // and logging in with the new one + let user = repo + .user() + .lookup(user_id) + .await? + .ok_or(RouteError::UserNotFound(user_id))?; + + let ctx = UpstreamExistingLinkContext::new(user) + .with_session(user_session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + Html(templates.render_upstream_oauth2_link_mismatch(&ctx)?).into_response() + } + + (Some(user_session), None) => { + // Session not linked, but user logged in: suggest linking account + let ctx = UpstreamSuggestLink::new(&link) + .with_session(user_session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + Html(templates.render_upstream_oauth2_suggest_link(&ctx)?).into_response() + } + + (None, Some(user_id)) => { + // Session linked, but user not logged in: do the login + let user = repo + .user() + .lookup(user_id) + .await? 
+ .ok_or(RouteError::UserNotFound(user_id))?; + + // Check that the user is not locked or deactivated + if user.deactivated_at.is_some() { + // The account is deactivated, show the 'account deactivated' fallback + let ctx = AccountInactiveContext::new(user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let fallback = templates.render_account_deactivated(&ctx)?; + return Ok((cookie_jar, Html(fallback).into_response())); + } + + if user.locked_at.is_some() { + // The account is locked, show the 'account locked' fallback + let ctx = AccountInactiveContext::new(user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let fallback = templates.render_account_locked(&ctx)?; + return Ok((cookie_jar, Html(fallback).into_response())); + } + + let session = repo + .browser_session() + .add(&mut rng, &clock, &user, user_agent) + .await?; + + let upstream_session = repo + .upstream_oauth_session() + .consume(&clock, upstream_session, &session) + .await?; + + repo.browser_session() + .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_session) + .await?; + + let post_auth_action = OptionalPostAuthAction { + post_auth_action: post_auth_action.cloned(), + }; + + cookie_jar = sessions_cookie + .consume_link(link_id)? + .save(cookie_jar, &clock); + cookie_jar = cookie_jar.set_session(&session); + + repo.save().await?; + + LOGIN_COUNTER.add( + 1, + &[KeyValue::new( + PROVIDER, + upstream_session.provider_id.to_string(), + )], + ); + + post_auth_action.go_next(&url_builder).into_response() + } + + (None, None) => { + // Session not linked and used not logged in: suggest creating an + // account or logging in an existing user + let id_token = upstream_session.id_token().map(Jwt::try_from).transpose()?; + + let provider = repo + .upstream_oauth_provider() + .lookup(link.provider_id) + .await? 
+ .ok_or(RouteError::ProviderNotFound(link.provider_id))?; + + let env = environment(); + + let mut context = AttributeMappingContext::new(); + if let Some(id_token) = id_token { + let (_, payload) = id_token.into_parts(); + context = context.with_id_token_claims(payload); + } + if let Some(extra_callback_parameters) = upstream_session.extra_callback_parameters() { + context = context.with_extra_callback_parameters(extra_callback_parameters.clone()); + } + if let Some(userinfo) = upstream_session.userinfo() { + context = context.with_userinfo_claims(userinfo.clone()); + } + let context = context.build(); + + let displayname = if provider.claims_imports.displayname.ignore() { + None + } else { + let template = provider + .claims_imports + .displayname + .template + .as_deref() + .unwrap_or(DEFAULT_DISPLAYNAME_TEMPLATE); + + render_attribute_template( + &env, + template, + &context, + provider.claims_imports.displayname.is_required(), + )? + }; + + let email = if provider.claims_imports.email.ignore() { + None + } else { + let template = provider + .claims_imports + .email + .template + .as_deref() + .unwrap_or(DEFAULT_EMAIL_TEMPLATE); + + render_attribute_template( + &env, + template, + &context, + provider.claims_imports.email.is_required(), + )? + }; + + // We do a bunch of checks for the localpart. Instead of using nested ifs all + // the way, we use a labelled block, and use `break` for 'exiting' early when + // needed + let localpart = 'localpart: { + if provider.claims_imports.localpart.ignore() { + break 'localpart None; + } + + let template = provider + .claims_imports + .localpart + .template + .as_deref() + .unwrap_or(DEFAULT_LOCALPART_TEMPLATE); + + let Some(localpart) = render_attribute_template( + &env, + template, + &context, + provider.claims_imports.localpart.is_required(), + )? + else { + break 'localpart None; + }; + + let forced_or_required = provider.claims_imports.localpart.is_forced_or_required(); + + // We've got a localpart from the template. 
Let's run the policy + // engine on this registration and react early to a problem on + // the username + let res = policy + .evaluate_register(mas_policy::RegisterInput { + registration_method: mas_policy::RegistrationMethod::UpstreamOAuth2, + username: &localpart, + email: email.as_deref(), + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent: user_agent.clone(), + }, + }) + .await?; + + // We don't do a full policy check at this point, only look for violations on + // the username + if res + .violations + .iter() + .any(|violation| violation.field.as_deref() == Some("username")) + { + if !forced_or_required { + tracing::warn!( + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + "Upstream provider returned a localpart {localpart:?} which was denied by the policy ({res}). As the username is just a suggestion, it was ignored." + ); + break 'localpart None; + } + + // If the username policy check fails, we display an error message. + // TODO: translate + let ctx = ErrorContext::new() + .with_code("Policy error") + .with_description(format!( + r"Upstream account provider returned {localpart:?} as username, + which does not pass the policy check: {res}" + )) + .with_language(&locale); + + return Ok(( + cookie_jar, + Html(templates.render_error(&ctx)?).into_response(), + )); + } + + // We got a localpart from the template. We need to check if it's + // available, and if it's not apply the conflict resolution setup in + // the config + let maybe_existing_user = repo.user().find_by_username(&localpart).await?; + if let Some(existing_user) = maybe_existing_user { + if !forced_or_required { + tracing::warn!( + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + user.id = %existing_user.id, + "Upstream provider returned a localpart {localpart:?} which is already used by another user. As the username is just a suggestion, it was ignored." 
+ ); + break 'localpart None; + } + + match provider.claims_imports.localpart.on_conflict { + // We matched an existing user, but the server doesn't allow us to link to + // existing users automatically. In this case, we error out + UpstreamOAuthProviderOnConflict::Fail => { + tracing::warn!( + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + user.id = %existing_user.id, + "Upstream provider returned a localpart {localpart:?} which is already used by another user. Configuration doesn't allow for automatic linking of existing users." + ); + + // TODO: translate + let ctx = ErrorContext::new() + .with_code("User exists") + .with_description(format!( + r"Upstream account provider returned {localpart:?} as username, + which is not linked to that upstream account. Your homeserver does not allow + linking an upstream account to an existing account" + )) + .with_language(&locale); + + return Ok(( + cookie_jar, + Html(templates.render_error(&ctx)?).into_response(), + )); + } + + // We matched an existing user and the conflict resolution is to add the + // link to the existing user. 
In this case, we add the link + UpstreamOAuthProviderOnConflict::Add => { + tracing::info!( + user.id = %existing_user.id, + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + upstream_oauth_link.subject = link.subject, + "Upstream account mapped localpart {localpart:?} matched an existing user, linking" + ); + + // Add link to the user + repo.upstream_oauth_link() + .associate_to_user(&link, &existing_user) + .await?; + } + + // We matched an existing user and the conflict resolution is to replace any + // link on the existing user with this one + UpstreamOAuthProviderOnConflict::Replace => { + // Find existing links for this provider and user + let filter = UpstreamOAuthLinkFilter::new() + .for_provider(&provider) + .for_user(&existing_user); + let mut cursor = Pagination::first(100); + let mut removed = 0; + loop { + let page = repo.upstream_oauth_link().list(filter, cursor).await?; + for edge in page.edges { + // Remove any existing links for this provider and user + repo.upstream_oauth_link().remove(&clock, edge.node).await?; + cursor = cursor.after(edge.cursor); + removed += 1; + } + + if !page.has_next_page { + break; + } + } + + if removed > 0 { + tracing::warn!( + user.id = %existing_user.id, + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + upstream_oauth_link.subject = link.subject, + "Upstream account mapped localpart {localpart:?} matched an existing user, replaced {removed} links" + ); + } else { + tracing::info!( + user.id = %existing_user.id, + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + upstream_oauth_link.subject = link.subject, + "Upstream account mapped localpart {localpart:?} matched an existing user, linking" + ); + } + + // Add link to the user + repo.upstream_oauth_link() + .associate_to_user(&link, &existing_user) + .await?; + } + + // We matched an existing user and the conflict resolution is to link to the + // existing user *only if* 
there is no existing link on that user + UpstreamOAuthProviderOnConflict::Set => { + // Find existing links for this provider and user + let filter = UpstreamOAuthLinkFilter::new() + .for_provider(&provider) + .for_user(&existing_user); + + let count = repo.upstream_oauth_link().count(filter).await?; + if count > 0 { + tracing::warn!( + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + user.id = %existing_user.id, + "Upstream provider returned a localpart {localpart:?} matching an existing user who already has {count} link(s) to this provider, which isn't allowed by the conflict resolution" + ); + + // TODO: translate + let ctx = ErrorContext::new() + .with_code("User exists") + .with_description(format!( + r"Upstream account provider returned {localpart:?} as username, + but this user already has an existing link to this provider. + Your homeserver does not allow replacing upstream account links automatically." + )) + .with_language(&locale); + + return Ok(( + cookie_jar, + Html(templates.render_error(&ctx)?).into_response(), + )); + } + + // Add link to the user + repo.upstream_oauth_link() + .associate_to_user(&link, &existing_user) + .await?; + } + } + + // Now that we've resolved the conflict, log in that existing user + + // Check that the user is not locked or deactivated + if existing_user.deactivated_at.is_some() { + // The account is deactivated, show the 'account deactivated' fallback + let ctx = AccountInactiveContext::new(existing_user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let fallback = templates.render_account_deactivated(&ctx)?; + return Ok((cookie_jar, Html(fallback).into_response())); + } + + if existing_user.locked_at.is_some() { + // The account is locked, show the 'account locked' fallback + let ctx = AccountInactiveContext::new(existing_user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let fallback = templates.render_account_locked(&ctx)?; + return 
Ok((cookie_jar, Html(fallback).into_response())); + } + + let session = repo + .browser_session() + .add(&mut rng, &clock, &existing_user, user_agent) + .await?; + + let upstream_session = repo + .upstream_oauth_session() + .consume(&clock, upstream_session, &session) + .await?; + + repo.browser_session() + .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_session) + .await?; + + let post_auth_action = OptionalPostAuthAction { + post_auth_action: post_auth_action.cloned(), + }; + + let cookie_jar = sessions_cookie + .consume_link(link_id)? + .save(cookie_jar, &clock) + .set_session(&session); + + repo.save().await?; + + // Count this 'on-the-fly' linking as a login + LOGIN_COUNTER.add( + 1, + &[KeyValue::new( + PROVIDER, + upstream_session.provider_id.to_string(), + )], + ); + + return Ok(( + cookie_jar, + post_auth_action.go_next(&url_builder).into_response(), + )); + } + + // Now let's check if the localpart is allowed by the homeserver. It's possible + // that it's plain invalid (although that should have been caught by the + // policy), or just reserved by an application service + let is_available = homeserver + .is_localpart_available(&localpart) + .await + .map_err(RouteError::HomeserverConnection)?; + + if !is_available { + if !forced_or_required { + tracing::warn!( + upstream_oauth_provider.id = %provider.id, + upstream_oauth_link.id = %link.id, + "Upstream provider returned a localpart {localpart:?} which isn't available on the homeserver. As the username is just a suggestion, it was ignored." 
+ ); + break 'localpart None; + } + + // TODO: translate + let ctx = ErrorContext::new() + .with_code("Localpart not available") + .with_description(format!( + r"Localpart {localpart:?} is not available on this homeserver" + )) + .with_language(&locale); + + return Ok(( + cookie_jar, + Html(templates.render_error(&ctx)?).into_response(), + )); + } + + Some(localpart) + }; + + if provider.claims_imports.skip_confirmation { + let Some(localpart) = localpart else { + return Err(RouteError::Internal( + "No localpart available even though the provider is configured to skip confirmation, this is a bug!".into() + )); + }; + + // Register on the fly + REGISTRATION_COUNTER.add(1, &[KeyValue::new(PROVIDER, provider.id.to_string())]); + + let registration = prepare_user_registration( + &mut rng, + &clock, + &mut repo, + upstream_session, + localpart, + displayname, + email, + activity_tracker.ip(), + user_agent, + post_auth_action.map(|action| serde_json::json!(action)), + ) + .await?; + + let registrations = UserRegistrationSessionsCookie::load(&cookie_jar); + + let cookie_jar = sessions_cookie + .consume_link(link_id)? 
/// POST handler for the upstream OAuth 2.0 link page.
///
/// Depending on the submitted [`FormData`] and the current state — whether a
/// browser session is active and whether the upstream link is already
/// associated with a user — this either:
///
///  * links the upstream account to the currently logged-in user
///    (`FormData::Link`), or
///  * starts a new user registration from the attributes imported from the
///    upstream provider (`FormData::Register`).
///
/// Any other combination of state and form action is rejected with
/// [`RouteError::InvalidFormAction`].
#[tracing::instrument(
    name = "handlers.upstream_oauth2.link.post",
    fields(upstream_oauth_link.id = %link_id),
    skip_all,
)]
pub(crate) async fn post(
    mut rng: BoxRng,
    clock: BoxClock,
    mut repo: BoxRepository,
    cookie_jar: CookieJar,
    user_agent: Option<TypedHeader<headers::UserAgent>>,
    mut policy: Policy,
    PreferredLanguage(locale): PreferredLanguage,
    activity_tracker: BoundActivityTracker,
    State(templates): State<Templates>,
    State(homeserver): State<Arc<dyn HomeserverConnection>>,
    State(url_builder): State<UrlBuilder>,
    State(site_config): State<SiteConfig>,
    Path(link_id): Path<Ulid>,
    Form(form): Form<ProtectedForm<FormData>>,
) -> Result<impl IntoResponse, RouteError> {
    let user_agent = user_agent.map(|ua| ua.as_str().to_owned());
    // CSRF-check the submitted form before acting on it
    let form = cookie_jar.verify_form(&clock, form)?;

    // The upstream sessions cookie tells us which auth session this browser
    // started for this link; without it the POST is not legitimate
    let sessions_cookie = UpstreamSessionsCookie::load(&cookie_jar);
    let (session_id, post_auth_action) = sessions_cookie
        .lookup_link(link_id)
        .map_err(|_| RouteError::MissingCookie)?;

    let link = repo
        .upstream_oauth_link()
        .lookup(link_id)
        .await?
        .ok_or(RouteError::LinkNotFound)?;

    let upstream_session = repo
        .upstream_oauth_session()
        .lookup(session_id)
        .await?
        .ok_or(RouteError::SessionNotFound(session_id))?;

    // This checks that we're in a browser session which is allowed to consume this
    // link: the upstream auth session should have been started in this browser.
    if upstream_session.link_id() != Some(link.id) {
        return Err(RouteError::SessionNotFound(session_id));
    }

    // A session can only be consumed once; replaying the form is an error
    if upstream_session.is_consumed() {
        return Err(RouteError::SessionConsumed(session_id));
    }

    let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng);
    let (user_session_info, cookie_jar) = cookie_jar.session_info();
    let maybe_user_session = user_session_info.load_active_session(&mut repo).await?;
    let form_state = form.to_form_state();

    match (maybe_user_session, link.user_id, form) {
        (Some(session), None, FormData::Link) => {
            // The user is already logged in, the link is not linked to any user, and the
            // user asked to link their account.
            repo.upstream_oauth_link()
                .associate_to_user(&link, &session.user)
                .await?;

            let upstream_session = repo
                .upstream_oauth_session()
                .consume(&clock, upstream_session, &session)
                .await?;

            // Record the upstream authentication on the browser session
            repo.browser_session()
                .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_session)
                .await?;

            let post_auth_action = OptionalPostAuthAction {
                post_auth_action: post_auth_action.cloned(),
            };

            let cookie_jar = sessions_cookie
                .consume_link(link_id)?
                .save(cookie_jar, &clock);
            let cookie_jar = cookie_jar.set_session(&session);

            repo.save().await?;

            Ok((cookie_jar, post_auth_action.go_next(&url_builder)).into_response())
        }

        (
            None,
            None,
            FormData::Register {
                username,
                import_email,
                import_display_name,
                accept_terms,
            },
        ) => {
            // The user got the form to register a new account, and is not logged in.
            // Depending on the claims_imports, we've let the user choose their username,
            // choose whether they want to import the email and display name, or
            // not.

            // Those fields are Some("on") if the checkbox is checked
            let import_email = import_email.is_some();
            let import_display_name = import_display_name.is_some();
            let accept_terms = accept_terms.is_some();

            let id_token = upstream_session.id_token().map(Jwt::try_from).transpose()?;

            let provider = repo
                .upstream_oauth_provider()
                .lookup(link.provider_id)
                .await?
                .ok_or(RouteError::ProviderNotFound(link.provider_id))?;

            // Let's try to import the claims from the ID token
            let env = environment();

            // Build the attribute-mapping context from every claim source we
            // have: ID token payload, extra callback parameters, and userinfo
            let mut context = AttributeMappingContext::new();
            if let Some(id_token) = id_token {
                let (_, payload) = id_token.into_parts();
                context = context.with_id_token_claims(payload);
            }
            if let Some(extra_callback_parameters) = upstream_session.extra_callback_parameters() {
                context = context.with_extra_callback_parameters(extra_callback_parameters.clone());
            }
            if let Some(userinfo) = upstream_session.userinfo() {
                context = context.with_userinfo_claims(userinfo.clone());
            }
            let context = context.build();

            // Create a template context in case we need to re-render because of an error
            let mut ctx = UpstreamRegister::new(link.clone(), provider.clone());

            let display_name = if provider
                .claims_imports
                .displayname
                .should_import(import_display_name)
            {
                let template = provider
                    .claims_imports
                    .displayname
                    .template
                    .as_deref()
                    .unwrap_or(DEFAULT_DISPLAYNAME_TEMPLATE);

                render_attribute_template(
                    &env,
                    template,
                    &context,
                    provider.claims_imports.displayname.is_required(),
                )?
            } else {
                None
            };

            if let Some(ref display_name) = display_name {
                ctx = ctx.with_display_name(
                    display_name.clone(),
                    provider.claims_imports.displayname.is_forced_or_required(),
                );
            }

            let email = if provider.claims_imports.email.should_import(import_email) {
                let template = provider
                    .claims_imports
                    .email
                    .template
                    .as_deref()
                    .unwrap_or(DEFAULT_EMAIL_TEMPLATE);

                render_attribute_template(
                    &env,
                    template,
                    &context,
                    provider.claims_imports.email.is_required(),
                )?
            } else {
                None
            };

            if let Some(ref email) = email {
                ctx = ctx.with_email(
                    email.clone(),
                    provider.claims_imports.email.is_forced_or_required(),
                );
            }

            let username = if provider.claims_imports.localpart.is_forced_or_required() {
                let template = provider
                    .claims_imports
                    .localpart
                    .template
                    .as_deref()
                    .unwrap_or(DEFAULT_LOCALPART_TEMPLATE);

                render_attribute_template(&env, template, &context, true)?
            } else {
                // If there is no forced username, we can use the one the user entered
                username
            }
            .unwrap_or_default();

            ctx = ctx.with_localpart(
                username.clone(),
                provider.claims_imports.localpart.is_forced_or_required(),
            );

            // Validate the form
            let form_state = {
                let mut form_state = form_state;
                let mut homeserver_denied_username = false;
                if username.is_empty() {
                    form_state.add_error_on_field(
                        mas_templates::UpstreamRegisterFormField::Username,
                        FieldError::Required,
                    );
                } else if repo.user().exists(&username).await? {
                    // Username already taken in MAS itself
                    form_state.add_error_on_field(
                        mas_templates::UpstreamRegisterFormField::Username,
                        FieldError::Exists,
                    );
                } else if !homeserver
                    .is_localpart_available(&username)
                    .await
                    .map_err(RouteError::HomeserverConnection)?
                {
                    // The user already exists on the homeserver
                    tracing::warn!(
                        %username,
                        "Homeserver denied username provided by user"
                    );

                    // We defer adding the error on the field, until we know whether we had another
                    // error from the policy, to avoid showing both
                    homeserver_denied_username = true;
                }

                // If we have a TOS in the config, make sure the user has accepted it
                if site_config.tos_uri.is_some() && !accept_terms {
                    form_state.add_error_on_field(
                        mas_templates::UpstreamRegisterFormField::AcceptTerms,
                        FieldError::Required,
                    );
                }

                // Policy check
                let res = policy
                    .evaluate_register(mas_policy::RegisterInput {
                        registration_method: mas_policy::RegistrationMethod::UpstreamOAuth2,
                        username: &username,
                        email: email.as_deref(),
                        requester: mas_policy::Requester {
                            ip_address: activity_tracker.ip(),
                            user_agent: user_agent.clone(),
                        },
                    })
                    .await?;

                for violation in res.violations {
                    match violation.field.as_deref() {
                        Some("username") => {
                            // If the homeserver denied the username, but we also had an error on
                            // the policy side, we don't want to show
                            // both, so we reset the state here
                            homeserver_denied_username = false;
                            form_state.add_error_on_field(
                                mas_templates::UpstreamRegisterFormField::Username,
                                FieldError::Policy {
                                    code: violation.code.map(|c| c.as_str()),
                                    message: violation.msg,
                                },
                            );
                        }
                        _ => form_state.add_error_on_form(FormError::Policy {
                            code: violation.code.map(|c| c.as_str()),
                            message: violation.msg,
                        }),
                    }
                }

                if homeserver_denied_username {
                    // XXX: we may want to return different errors like "this username is reserved"
                    form_state.add_error_on_field(
                        mas_templates::UpstreamRegisterFormField::Username,
                        FieldError::Exists,
                    );
                }

                form_state
            };

            // On any validation error, re-render the registration form with
            // the accumulated field errors instead of proceeding
            if !form_state.is_valid() {
                let ctx = ctx
                    .with_form_state(form_state)
                    .with_csrf(csrf_token.form_value())
                    .with_language(locale);

                return Ok((
                    cookie_jar,
                    Html(templates.render_upstream_oauth2_do_register(&ctx)?),
                )
                    .into_response());
            }

            REGISTRATION_COUNTER.add(1, &[KeyValue::new(PROVIDER, provider.id.to_string())]);

            let mut registration = prepare_user_registration(
                &mut rng,
                &clock,
                &mut repo,
                upstream_session,
                username,
                display_name,
                email,
                activity_tracker.ip(),
                user_agent,
                post_auth_action.map(|action| serde_json::json!(action)),
            )
            .await?;

            if let Some(terms_url) = &site_config.tos_uri {
                registration = repo
                    .user_registration()
                    .set_terms_url(registration, terms_url.clone())
                    .await?;
            }

            let registrations = UserRegistrationSessionsCookie::load(&cookie_jar);

            // The upstream session cookie entry is consumed; the registration
            // is tracked in its own cookie for the follow-up steps
            let cookie_jar = sessions_cookie
                .consume_link(link_id)?
                .save(cookie_jar, &clock);

            let cookie_jar = registrations.add(&registration).save(cookie_jar, &clock);

            repo.save().await?;

            // Redirect to the user registration flow, in case we have any other step to
            // finish
            Ok((
                cookie_jar,
                url_builder.redirect(&mas_router::RegisterFinish::new(registration.id)),
            )
                .into_response())
        }

        // Any other (session, link-user, action) combination is invalid
        _ => Err(RouteError::InvalidFormAction),
    }
}
Html(templates.render_upstream_oauth2_do_register(&ctx)?), + ) + .into_response()); + } + + REGISTRATION_COUNTER.add(1, &[KeyValue::new(PROVIDER, provider.id.to_string())]); + + let mut registration = prepare_user_registration( + &mut rng, + &clock, + &mut repo, + upstream_session, + username, + display_name, + email, + activity_tracker.ip(), + user_agent, + post_auth_action.map(|action| serde_json::json!(action)), + ) + .await?; + + if let Some(terms_url) = &site_config.tos_uri { + registration = repo + .user_registration() + .set_terms_url(registration, terms_url.clone()) + .await?; + } + + let registrations = UserRegistrationSessionsCookie::load(&cookie_jar); + + let cookie_jar = sessions_cookie + .consume_link(link_id)? + .save(cookie_jar, &clock); + + let cookie_jar = registrations.add(®istration).save(cookie_jar, &clock); + + repo.save().await?; + + // Redirect to the user registration flow, in case we have any other step to + // finish + Ok(( + cookie_jar, + url_builder.redirect(&mas_router::RegisterFinish::new(registration.id)), + ) + .into_response()) + } + + _ => Err(RouteError::InvalidFormAction), + } +} + +/// Create a user registration using attributes got from the upstream +/// authorization session +async fn prepare_user_registration( + rng: &mut BoxRng, + clock: &BoxClock, + repo: &mut BoxRepository, + upstream_session: UpstreamOAuthAuthorizationSession, + localpart: String, + displayname: Option, + email: Option, + ip_address: Option, + user_agent: Option, + post_auth_action: Option, +) -> Result { + let mut registration = repo + .user_registration() + .add( + rng, + clock, + localpart, + ip_address, + user_agent, + post_auth_action, + ) + .await?; + + // If we have an email, add an email authentication and complete it + if let Some(email) = email { + let authentication = repo + .user_email() + .add_authentication_for_registration(rng, clock, email, ®istration) + .await?; + let authentication = repo + .user_email() + 
.complete_authentication_with_upstream(clock, authentication, &upstream_session) + .await?; + + registration = repo + .user_registration() + .set_email_authentication(registration, &authentication) + .await?; + } + + // If we have a display name, add it to the registration + if let Some(name) = displayname { + registration = repo + .user_registration() + .set_display_name(registration, name) + .await?; + } + + let registration = repo + .user_registration() + .set_upstream_oauth_authorization_session(registration, &upstream_session) + .await?; + + Ok(registration) +} + +#[cfg(test)] +mod tests { + use hyper::{Request, StatusCode, header::CONTENT_TYPE}; + use mas_data_model::{ + UpstreamOAuthAuthorizationSession, UpstreamOAuthLink, UpstreamOAuthProviderClaimsImports, + UpstreamOAuthProviderImportPreference, UpstreamOAuthProviderLocalpartPreference, + UpstreamOAuthProviderTokenAuthMethod, UserEmailAuthentication, UserRegistration, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_jose::jwt::{JsonWebSignatureHeader, Jwt}; + use mas_keystore::Keystore; + use mas_router::Route; + use mas_storage::{Repository, RepositoryError, upstream_oauth2::UpstreamOAuthProviderParams}; + use oauth2_types::scope::{OPENID, Scope}; + use rand_chacha::ChaChaRng; + use serde_json::Value; + use sqlx::PgPool; + use ulid::Ulid; + + use super::UpstreamSessionsCookie; + use crate::test_utils::{CookieHelper, RequestBuilderExt, ResponseExt, TestState, setup}; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Force, + template: None, + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::default(), + }, + email: 
UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Force, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + let id_token_claims = serde_json::json!({ + "preferred_username": "john", + "email": "john@example.com", + "email_verified": true, + }); + + // Grab a key to sign the id_token + // We could generate a key on the fly, but because we have one available here, + // why not use it? + let key = state + .key_store + .signing_key_for_algorithm(&JsonWebSignatureAlg::Rs256) + .unwrap(); + + let signer = key + .params() + .signing_key_for_alg(&JsonWebSignatureAlg::Rs256) + .unwrap(); + let header = JsonWebSignatureHeader::new(JsonWebSignatureAlg::Rs256); + let id_token = + Jwt::sign_with_rng(&mut rng, header, id_token_claims.clone(), &signer).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: + 
mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .add( + &mut rng, + &state.clock, + &provider, + "state".to_owned(), + None, + None, + ) + .await + .unwrap(); + + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + "subject".to_owned(), + None, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .complete_with_link( + &state.clock, + session, + &link, + Some(id_token.into_string()), + Some(id_token_claims), + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let request = Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + let request = Request::post(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).form( + serde_json::json!({ + "csrf": csrf_token, + "action": "register", + "import_email": "on", + "accept_terms": "on", + }), + ); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + let location = response.headers().get(hyper::header::LOCATION).unwrap(); + // Grab the registration 
ID from the redirected URL: + // /register/steps/{id}/finish + let registration_id: Ulid = str::from_utf8(location.as_bytes()) + .unwrap() + .rsplit('/') + .nth(1) + .expect("Location to have two slashes") + .parse() + .expect("last segment of location to be a ULID"); + + // Check that we have a registered user, with the email imported + let mut repo = state.repository().await.unwrap(); + let registration: UserRegistration = repo + .user_registration() + .lookup(registration_id) + .await + .unwrap() + .expect("user registration exists"); + + assert_eq!(registration.password, None); + assert_eq!(registration.completed_at, None); + assert_eq!(registration.username, "john"); + + let email_auth_id = registration + .email_authentication_id + .expect("registration should have an email authentication"); + let email_auth: UserEmailAuthentication = repo + .user_email() + .lookup_authentication(email_auth_id) + .await + .unwrap() + .expect("email authentication should exist"); + assert_eq!(email_auth.email, "john@example.com"); + assert!(email_auth.completed_at.is_some()); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_skip_confirmation(pool: PgPool) { + // Same test as test_register, but checks that we get straight to the + // registration flow skipping the confirmation + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + skip_confirmation: true, + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::default(), + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Force, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + let id_token_claims = 
serde_json::json!({ + "preferred_username": "john", + "email": "john@example.com", + "email_verified": true, + }); + + // Grab a key to sign the id_token + // We could generate a key on the fly, but because we have one available here, + // why not use it? + let key = state + .key_store + .signing_key_for_algorithm(&JsonWebSignatureAlg::Rs256) + .unwrap(); + + let signer = key + .params() + .signing_key_for_alg(&JsonWebSignatureAlg::Rs256) + .unwrap(); + let header = JsonWebSignatureHeader::new(JsonWebSignatureAlg::Rs256); + let id_token = + Jwt::sign_with_rng(&mut rng, header, id_token_claims.clone(), &signer).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .add( + &mut rng, + &state.clock, + &provider, + "state".to_owned(), + None, + None, + ) + .await + .unwrap(); 
+ + let link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + "subject".to_owned(), + None, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .complete_with_link( + &state.clock, + session, + &link, + Some(id_token.into_string()), + Some(id_token_claims), + None, + None, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let request = Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + let location = response.headers().get(hyper::header::LOCATION).unwrap(); + // Grab the registration ID from the redirected URL: + // /register/steps/{id}/finish + let registration_id: Ulid = str::from_utf8(location.as_bytes()) + .unwrap() + .rsplit('/') + .nth(1) + .expect("Location to have two slashes") + .parse() + .expect("last segment of location to be a ULID"); + + // Check that we have a registered user, with the email imported + let mut repo = state.repository().await.unwrap(); + let registration: UserRegistration = repo + .user_registration() + .lookup(registration_id) + .await + .unwrap() + .expect("user registration exists"); + + assert_eq!(registration.password, None); + assert_eq!(registration.completed_at, None); + assert_eq!(registration.username, "john"); + + let email_auth_id = registration + .email_authentication_id + .expect("registration should have an email authentication"); + let email_auth: UserEmailAuthentication = repo + .user_email() + .lookup_authentication(email_auth_id) + .await + .unwrap() + .expect("email authentication 
should exist"); + assert_eq!(email_auth.email, "john@example.com"); + assert!(email_auth.completed_at.is_some()); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_existing_account(pool: PgPool) { + let existing_username = "john"; + let subject = "subject"; + + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + // This is the important bit: this will automatically link + // existing accounts if the localpart matches + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::Add, + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + //`preferred_username` matches an existing user's username + let id_token_claims = serde_json::json!({ + "preferred_username": existing_username, + "email": "any@example.com", + "email_verified": true, + }); + + let id_token = sign_token(&mut rng, &state.key_store, id_token_claims.clone()).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + 
token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }, + ) + .await + .unwrap(); + + //provision upstream authorization session to setup cookies + let (link, session) = add_linked_upstream_session( + &mut rng, + &state.clock, + &mut repo, + &provider, + subject, + &id_token.into_string(), + id_token_claims, + ) + .await + .unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let user = repo + .user() + .add(&mut rng, &state.clock, existing_username.to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let request = Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + + // Check that the existing user has the oidc link + let mut repo = state.repository().await.unwrap(); + + let link = repo + .upstream_oauth_link() + .find_by_subject(&provider, subject) + .await + .unwrap() + .expect("link exists"); + + assert_eq!(link.user_id, Some(user.id)); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_existing_account_when_not_allowed_by_default(pool: PgPool) { + let existing_username = "john"; + + 
setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::default(), + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + // `preferred_username` matches an existing user's username + let id_token_claims = serde_json::json!({ + "preferred_username": existing_username, + "email": "any@example.com", + "email_verified": true, + }); + + let id_token = sign_token(&mut rng, &state.key_store, id_token_claims.clone()).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + 
on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }, + ) + .await + .unwrap(); + + let (link, session) = add_linked_upstream_session( + &mut rng, + &state.clock, + &mut repo, + &provider, + "subject", + &id_token.into_string(), + id_token_claims, + ) + .await + .unwrap(); + + // Provision an user + repo.user() + .add(&mut rng, &state.clock, existing_username.to_owned()) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let request = Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + + assert!(response.body().contains("Unexpected error")); + } + + fn sign_token( + rng: &mut ChaChaRng, + keystore: &Keystore, + payload: Value, + ) -> Result, mas_jose::jwt::JwtSignatureError> { + let key = keystore + .signing_key_for_algorithm(&JsonWebSignatureAlg::Rs256) + .unwrap(); + + let signer = key + .params() + .signing_key_for_alg(&JsonWebSignatureAlg::Rs256) + .unwrap(); + + let header = JsonWebSignatureHeader::new(JsonWebSignatureAlg::Rs256); + + Jwt::sign_with_rng(rng, header, payload, &signer) + } + + async fn add_linked_upstream_session( + rng: &mut ChaChaRng, + clock: &impl mas_data_model::Clock, + repo: &mut Box + Send + Sync + 'static>, + provider: &mas_data_model::UpstreamOAuthProvider, + subject: &str, + id_token: &str, + id_token_claims: Value, + ) -> Result<(UpstreamOAuthLink, UpstreamOAuthAuthorizationSession), 
anyhow::Error> { + let session = repo + .upstream_oauth_session() + .add( + rng, + clock, + provider, + "state".to_owned(), + None, + Some("nonce".to_owned()), + ) + .await?; + + let link = repo + .upstream_oauth_link() + .add(rng, clock, provider, subject.to_owned(), None) + .await?; + + let session = repo + .upstream_oauth_session() + .complete_with_link( + clock, + session, + &link, + Some(id_token.to_owned()), + Some(id_token_claims), + None, + None, + ) + .await?; + + Ok((link, session)) + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_existing_account_replace_conflict(pool: PgPool) { + let existing_username = "john"; + let subject = "subject"; + let old_subject = "old_subject"; + + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + // This will replace any existing links for this provider and user + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::Replace, + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + let id_token_claims = serde_json::json!({ + "preferred_username": existing_username, + "email": "any@example.com", + "email_verified": true, + }); + + let id_token = sign_token(&mut rng, &state.key_store, id_token_claims.clone()).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: 
Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }, + ) + .await + .unwrap(); + + // Create an existing user + let user = repo + .user() + .add(&mut rng, &state.clock, existing_username.to_owned()) + .await + .unwrap(); + + // Create an existing link for this user and provider with a different subject + let old_link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + old_subject.to_owned(), + None, + ) + .await + .unwrap(); + + repo.upstream_oauth_link() + .associate_to_user(&old_link, &user) + .await + .unwrap(); + + // Provision upstream authorization session to setup cookies + let (link, session) = add_linked_upstream_session( + &mut rng, + &state.clock, + &mut repo, + &provider, + subject, + &id_token.into_string(), + id_token_claims, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let request = 
Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + + // Check that the new link is associated with the existing user + let mut repo = state.repository().await.unwrap(); + + let new_link = repo + .upstream_oauth_link() + .find_by_subject(&provider, subject) + .await + .unwrap() + .expect("new link exists"); + + assert_eq!(new_link.user_id, Some(user.id)); + + // Check that the old link was removed + let old_link_result = repo + .upstream_oauth_link() + .find_by_subject(&provider, old_subject) + .await + .unwrap(); + + assert!( + old_link_result.is_none(), + "Old link should have been removed" + ); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_existing_account_set_conflict_success(pool: PgPool) { + let existing_username = "john"; + let subject = "subject"; + + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + // This will only link if there are no existing links for this provider and user + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::Set, + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + let id_token_claims = serde_json::json!({ + "preferred_username": existing_username, + "email": "any@example.com", + "email_verified": true, + }); + + let id_token = sign_token(&mut rng, &state.key_store, id_token_claims.clone()).unwrap(); + + // Provision a provider and a link + let mut 
repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }, + ) + .await + .unwrap(); + + // Create an existing user (with no existing links for this provider) + let user = repo + .user() + .add(&mut rng, &state.clock, existing_username.to_owned()) + .await + .unwrap(); + + // Provision upstream authorization session to setup cookies + let (link, session) = add_linked_upstream_session( + &mut rng, + &state.clock, + &mut repo, + &provider, + subject, + &id_token.into_string(), + id_token_claims, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + cookies.import(cookie_jar); + + let request = 
Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + + // Check that the new link is associated with the existing user + let mut repo = state.repository().await.unwrap(); + + let new_link = repo + .upstream_oauth_link() + .find_by_subject(&provider, subject) + .await + .unwrap() + .expect("new link exists"); + + assert_eq!(new_link.user_id, Some(user.id)); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_link_existing_account_set_conflict_failure(pool: PgPool) { + let existing_username = "john"; + let subject = "subject"; + let old_subject = "old_subject"; + + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + let claims_imports = UpstreamOAuthProviderClaimsImports { + localpart: UpstreamOAuthProviderLocalpartPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + // This will only link if there are no existing links for this provider and user + on_conflict: mas_data_model::UpstreamOAuthProviderOnConflict::Set, + }, + email: UpstreamOAuthProviderImportPreference { + action: mas_data_model::UpstreamOAuthProviderImportAction::Require, + template: None, + }, + ..UpstreamOAuthProviderClaimsImports::default() + }; + + let id_token_claims = serde_json::json!({ + "preferred_username": existing_username, + "email": "any@example.com", + "email_verified": true, + }); + + let id_token = sign_token(&mut rng, &state.key_store, id_token_claims.clone()).unwrap(); + + // Provision a provider and a link + let mut repo = state.repository().await.unwrap(); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + 
human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports, + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + ui_order: 0, + }, + ) + .await + .unwrap(); + + // Create an existing user + let user = repo + .user() + .add(&mut rng, &state.clock, existing_username.to_owned()) + .await + .unwrap(); + + // Create an existing link for this user and provider with a different subject + let old_link = repo + .upstream_oauth_link() + .add( + &mut rng, + &state.clock, + &provider, + old_subject.to_owned(), + None, + ) + .await + .unwrap(); + + repo.upstream_oauth_link() + .associate_to_user(&old_link, &user) + .await + .unwrap(); + + // Provision upstream authorization session to setup cookies + let (link, session) = add_linked_upstream_session( + &mut rng, + &state.clock, + &mut repo, + &provider, + subject, + &id_token.into_string(), + id_token_claims, + ) + .await + .unwrap(); + + repo.save().await.unwrap(); + + let cookie_jar = state.cookie_jar(); + let upstream_sessions = UpstreamSessionsCookie::default() + .add(session.id, provider.id, "state".to_owned(), None) + .add_link_to_session(session.id, link.id) + .unwrap(); + let cookie_jar = upstream_sessions.save(cookie_jar, &state.clock); + 
cookies.import(cookie_jar); + + let request = Request::get(&*mas_router::UpstreamOAuth2Link::new(link.id).path()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + + // Should return an error page because the user already has a link for this + // provider + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + + // Verify the error message is displayed + assert!(response.body().contains("User exists")); + assert!(response.body().contains("replacing upstream account links")); + + // Check that the new link was NOT associated with the existing user + let mut repo = state.repository().await.unwrap(); + + let new_link = repo + .upstream_oauth_link() + .find_by_subject(&provider, subject) + .await + .unwrap() + .expect("new link exists"); + + // The new link should still not be associated with the user + assert_eq!(new_link.user_id, None); + + // Check that the old link is still there + let old_link_result = repo + .upstream_oauth_link() + .find_by_subject(&provider, old_subject) + .await + .unwrap(); + + assert!(old_link_result.is_some(), "Old link should still exist"); + assert_eq!(old_link_result.unwrap().user_id, Some(user.id)); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/mod.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/mod.rs new file mode 100644 index 00000000..272af648 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/mod.rs @@ -0,0 +1,143 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::string::FromUtf8Error; + +use mas_data_model::{UpstreamOAuthProvider, UpstreamOAuthProviderTokenAuthMethod}; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_keystore::{DecryptError, Encrypter, Keystore}; +use mas_oidc_client::types::client_credentials::ClientCredentials; +use pkcs8::DecodePrivateKey; +use serde::Deserialize; +use thiserror::Error; +use url::Url; + +pub(crate) mod authorize; +pub(crate) mod backchannel_logout; +pub(crate) mod cache; +pub(crate) mod callback; +mod cookie; +pub(crate) mod link; +mod template; + +use self::cookie::UpstreamSessions as UpstreamSessionsCookie; + +#[derive(Debug, Error)] +#[allow(clippy::enum_variant_names)] +enum ProviderCredentialsError { + #[error("Provider doesn't have a client secret")] + MissingClientSecret, + + #[error("Could not decrypt client secret")] + DecryptClientSecret { + #[from] + inner: DecryptError, + }, + + #[error("Client secret is invalid")] + InvalidClientSecret { + #[from] + inner: FromUtf8Error, + }, + + #[error("Invalid JSON in client secret")] + InvalidClientSecretJson { + #[from] + inner: serde_json::Error, + }, + + #[error("Could not parse PEM encoded private key")] + InvalidPrivateKey { + #[from] + inner: pkcs8::Error, + }, +} + +#[derive(Debug, Deserialize)] +pub struct SignInWithApple { + pub private_key: String, + pub team_id: String, + pub key_id: String, +} + +fn client_credentials_for_provider( + provider: &UpstreamOAuthProvider, + token_endpoint: &Url, + keystore: &Keystore, + encrypter: &Encrypter, +) -> Result { + let client_id = provider.client_id.clone(); + + // Decrypt the client secret + let client_secret = provider + .encrypted_client_secret + .as_deref() + .map(|encrypted_client_secret| { + let decrypted = encrypter.decrypt_string(encrypted_client_secret)?; + let decrypted = String::from_utf8(decrypted)?; + Ok::<_, ProviderCredentialsError>(decrypted) + }) + .transpose()?; + + let client_credentials = match provider.token_endpoint_auth_method { + 
UpstreamOAuthProviderTokenAuthMethod::None => ClientCredentials::None { client_id }, + + UpstreamOAuthProviderTokenAuthMethod::ClientSecretPost => { + ClientCredentials::ClientSecretPost { + client_id, + client_secret: client_secret + .ok_or(ProviderCredentialsError::MissingClientSecret)?, + } + } + + UpstreamOAuthProviderTokenAuthMethod::ClientSecretBasic => { + ClientCredentials::ClientSecretBasic { + client_id, + client_secret: client_secret + .ok_or(ProviderCredentialsError::MissingClientSecret)?, + } + } + + UpstreamOAuthProviderTokenAuthMethod::ClientSecretJwt => { + ClientCredentials::ClientSecretJwt { + client_id, + client_secret: client_secret + .ok_or(ProviderCredentialsError::MissingClientSecret)?, + signing_algorithm: provider + .token_endpoint_signing_alg + .clone() + .unwrap_or(JsonWebSignatureAlg::Rs256), + token_endpoint: token_endpoint.clone(), + } + } + + UpstreamOAuthProviderTokenAuthMethod::PrivateKeyJwt => ClientCredentials::PrivateKeyJwt { + client_id, + keystore: keystore.clone(), + signing_algorithm: provider + .token_endpoint_signing_alg + .clone() + .unwrap_or(JsonWebSignatureAlg::Rs256), + token_endpoint: token_endpoint.clone(), + }, + + UpstreamOAuthProviderTokenAuthMethod::SignInWithApple => { + let params = client_secret.ok_or(ProviderCredentialsError::MissingClientSecret)?; + let params: SignInWithApple = serde_json::from_str(¶ms)?; + + let key = elliptic_curve::SecretKey::from_pkcs8_pem(¶ms.private_key)?; + + ClientCredentials::SignInWithApple { + client_id, + key, + key_id: params.key_id, + team_id: params.team_id, + } + } + }; + + Ok(client_credentials) +} diff --git a/matrix-authentication-service/crates/handlers/src/upstream_oauth2/template.rs b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/template.rs new file mode 100644 index 00000000..fcf24473 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/upstream_oauth2/template.rs @@ -0,0 +1,241 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, sync::Arc}; + +use base64ct::{Base64, Base64Unpadded, Base64Url, Base64UrlUnpadded, Encoding}; +use minijinja::{ + Environment, Error, ErrorKind, Value, + value::{Enumerator, Object}, +}; + +/// Context passed to the attribute mapping template +/// +/// The variables available in the template are: +/// - `user`: claims for the user, merged from the ID token and userinfo +/// endpoint +/// - `id_token_claims`: claims from the ID token +/// - `userinfo_claims`: claims from the userinfo endpoint +/// - `extra_callback_parameters`: extra parameters passed to the callback +#[derive(Debug, Default)] +pub(crate) struct AttributeMappingContext { + id_token_claims: Option>, + extra_callback_parameters: Option, + userinfo_claims: Option, +} + +impl AttributeMappingContext { + pub fn new() -> Self { + Self::default() + } + + pub fn with_id_token_claims( + mut self, + id_token_claims: HashMap, + ) -> Self { + self.id_token_claims = Some(id_token_claims); + self + } + + pub fn with_extra_callback_parameters( + mut self, + extra_callback_parameters: serde_json::Value, + ) -> Self { + self.extra_callback_parameters = Some(extra_callback_parameters); + self + } + + pub fn with_userinfo_claims(mut self, userinfo_claims: serde_json::Value) -> Self { + self.userinfo_claims = Some(userinfo_claims); + self + } + + pub fn build(self) -> Value { + Value::from_object(self) + } +} + +impl Object for AttributeMappingContext { + fn get_value(self: &Arc, name: &Value) -> Option { + match name.as_str()? 
{
+            "user" => {
+                // `user` only exists when at least one claims source is present
+                if self.id_token_claims.is_none() && self.userinfo_claims.is_none() {
+                    return None;
+                }
+                // Merge userinfo claims first, then overlay ID token claims so
+                // that ID token values take precedence on key collisions
+                let mut merged_user: HashMap<String, serde_json::Value> = HashMap::new();
+                if let serde_json::Value::Object(userinfo) = self
+                    .userinfo_claims
+                    .clone()
+                    .unwrap_or(serde_json::Value::Null)
+                {
+                    merged_user.extend(userinfo);
+                }
+                if let Some(id_token) = self.id_token_claims.clone() {
+                    merged_user.extend(id_token);
+                }
+                Some(Value::from_serialize(merged_user))
+            }
+            "id_token_claims" => self.id_token_claims.as_ref().map(Value::from_serialize),
+            "userinfo_claims" => self.userinfo_claims.as_ref().map(Value::from_serialize),
+            "extra_callback_parameters" => self
+                .extra_callback_parameters
+                .as_ref()
+                .map(Value::from_serialize),
+            _ => None,
+        }
+    }
+
+    fn enumerate(self: &Arc<Self>) -> Enumerator {
+        let mut attrs = Vec::new();
+        // Keep this list in sync with `get_value` above: `user` is available
+        // when *either* claims source is present. (Previously this checked
+        // `userinfo_claims.is_none()`, which both advertised `user` when no
+        // claims existed and hid it when only userinfo claims existed.)
+        if self.id_token_claims.is_some() || self.userinfo_claims.is_some() {
+            attrs.push(minijinja::Value::from("user"));
+        }
+        if self.id_token_claims.is_some() {
+            attrs.push(minijinja::Value::from("id_token_claims"));
+        }
+        if self.userinfo_claims.is_some() {
+            attrs.push(minijinja::Value::from("userinfo_claims"));
+        }
+        if self.extra_callback_parameters.is_some() {
+            attrs.push(minijinja::Value::from("extra_callback_parameters"));
+        }
+        Enumerator::Values(attrs)
+    }
+}
+
+fn b64decode(value: &str) -> Result<Value, Error> {
+    // We're not too concerned about the performance of this filter, so we'll just
+    // try all the base64 variants when decoding
+    let bytes = Base64::decode_vec(value)
+        .or_else(|_| Base64Url::decode_vec(value))
+        .or_else(|_| Base64Unpadded::decode_vec(value))
+        .or_else(|_| Base64UrlUnpadded::decode_vec(value))
+        .map_err(|e| {
+            Error::new(
+                ErrorKind::InvalidOperation,
+                "Failed to decode base64 string",
+            )
+            .with_source(e)
+        })?;
+
+    // It is not obvious, but the cleanest way to get a Value stored as raw bytes is
+    // to wrap it in an Arc, because Value implements From<Arc<Vec<u8>>>
+    Ok(Value::from(Arc::new(bytes)))
+}
+
+fn b64encode(bytes: &[u8]) ->
String { + Base64::encode_string(bytes) +} + +/// Decode a Tag-Length-Value encoded byte array into a map of tag to value. +fn tlvdecode(bytes: &[u8]) -> Result, Error> { + let mut iter = bytes.iter().copied(); + let mut ret = HashMap::new(); + loop { + // TODO: this assumes the tag and the length are both single bytes, which is not + // always the case with protobufs. We should properly decode varints + // here. + let Some(tag) = iter.next() else { + break; + }; + + let len = iter + .next() + .ok_or_else(|| Error::new(ErrorKind::InvalidOperation, "Invalid ILV encoding"))?; + + let mut bytes = Vec::with_capacity(len.into()); + for _ in 0..len { + bytes.push( + iter.next().ok_or_else(|| { + Error::new(ErrorKind::InvalidOperation, "Invalid ILV encoding") + })?, + ); + } + + ret.insert(tag.into(), Value::from(Arc::new(bytes))); + } + + Ok(ret) +} + +fn string(value: &Value) -> String { + value.to_string() +} + +fn from_json(value: &str) -> Result { + let value: serde_json::Value = serde_json::from_str(value).map_err(|e| { + minijinja::Error::new( + minijinja::ErrorKind::InvalidOperation, + "Failed to decode JSON", + ) + .with_source(e) + })?; + + Ok(Value::from_serialize(value)) +} + +pub fn environment() -> Environment<'static> { + let mut env = Environment::new(); + + minijinja_contrib::add_to_environment(&mut env); + + env.add_filter("b64decode", b64decode); + env.add_filter("b64encode", b64encode); + env.add_filter("tlvdecode", tlvdecode); + env.add_filter("string", string); + env.add_filter("from_json", from_json); + + env.set_unknown_method_callback(minijinja_contrib::pycompat::unknown_method_callback); + + env +} + +#[cfg(test)] +mod tests { + use super::environment; + + #[test] + fn test_split() { + let env = environment(); + let res = env + .render_str(r#"{{ 'foo, bar' | split(', ') | join(" | ") }}"#, ()) + .unwrap(); + assert_eq!(res, "foo | bar"); + } + + #[test] + fn test_ilvdecode() { + let env = environment(); + let res = env + .render_str( + r#" + {%- 
set tlv = 'Cg0wLTM4NS0yODA4OS0wEgRtb2Nr' | b64decode | tlvdecode -%} + {%- if tlv[18]|string != 'mock' -%} + {{ "FAIL"/0 }} + {%- endif -%} + {{- tlv[10]|string -}} + "#, + (), + ) + .unwrap(); + assert_eq!(res, "0-385-28089-0"); + } + + #[test] + fn test_base64_decode() { + let env = environment(); + + let res = env + .render_str("{{ 'cGFkZGluZw==' | b64decode }}", ()) + .unwrap(); + assert_eq!(res, "padding"); + + let res = env + .render_str("{{ 'dW5wYWRkZWQ' | b64decode }}", ()) + .unwrap(); + assert_eq!(res, "unpadded"); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/views/app.rs b/matrix-authentication-service/crates/handlers/src/views/app.rs new file mode 100644 index 00000000..4ae5f522 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/app.rs @@ -0,0 +1,89 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{ + extract::State, + response::{Html, IntoResponse}, +}; +use axum_extra::extract::Query; +use mas_axum_utils::{InternalError, cookies::CookieJar}; +use mas_data_model::{BoxClock, BoxRng}; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::BoxRepository; +use mas_templates::{AppContext, TemplateContext, Templates}; +use serde::Deserialize; + +use crate::{ + BoundActivityTracker, PreferredLanguage, + session::{SessionOrFallback, load_session_or_fallback}, +}; + +#[derive(Deserialize)] +pub struct Params { + #[serde(default, flatten)] + action: Option, +} + +#[tracing::instrument(name = "handlers.views.app.get", skip_all)] +pub async fn get( + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + activity_tracker: BoundActivityTracker, + State(url_builder): State, + Query(Params { action }): Query, + mut repo: BoxRepository, + clock: BoxClock, + mut rng: BoxRng, + cookie_jar: CookieJar, +) -> Result { + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. + } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + // TODO: keep the full path, not just the action + let Some(session) = maybe_session else { + return Ok(( + cookie_jar, + url_builder.redirect(&mas_router::Login::and_then( + PostAuthAction::manage_account(action), + )), + ) + .into_response()); + }; + + activity_tracker + .record_browser_session(&clock, &session) + .await; + + let ctx = AppContext::from_url_builder(&url_builder).with_language(locale); + let content = templates.render_app(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} + +/// Like `get`, but allow anonymous access. +/// Used for a subset of the account management paths. +/// Needed for e.g. account recovery. 
+#[tracing::instrument(name = "handlers.views.app.get_anonymous", skip_all)] +pub async fn get_anonymous( + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, +) -> Result { + let ctx = AppContext::from_url_builder(&url_builder).with_language(locale); + let content = templates.render_app(&ctx)?; + + Ok(Html(content).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/index.rs b/matrix-authentication-service/crates/handlers/src/views/index.rs new file mode 100644 index 00000000..0e2acccf --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/index.rs @@ -0,0 +1,63 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + extract::State, + response::{Html, IntoResponse, Response}, +}; +use mas_axum_utils::{InternalError, cookies::CookieJar, csrf::CsrfExt}; +use mas_data_model::{BoxClock, BoxRng}; +use mas_router::UrlBuilder; +use mas_storage::BoxRepository; +use mas_templates::{IndexContext, TemplateContext, Templates}; + +use crate::{ + BoundActivityTracker, + preferred_language::PreferredLanguage, + session::{SessionOrFallback, load_session_or_fallback}, +}; + +#[tracing::instrument(name = "handlers.views.index.get", skip_all)] +pub async fn get( + mut rng: BoxRng, + clock: BoxClock, + activity_tracker: BoundActivityTracker, + State(templates): State, + State(url_builder): State, + mut repo: BoxRepository, + cookie_jar: CookieJar, + PreferredLanguage(locale): PreferredLanguage, +) -> Result { + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. 
+ } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + if let Some(session) = maybe_session.as_ref() { + activity_tracker + .record_browser_session(&clock, session) + .await; + } + + let ctx = IndexContext::new(url_builder.oidc_discovery()) + .maybe_with_session(maybe_session) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_index(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/login.rs b/matrix-authentication-service/crates/handlers/src/views/login.rs new file mode 100644 index 00000000..72e1566f --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/login.rs @@ -0,0 +1,936 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::{Arc, LazyLock}; + +use axum::{ + extract::{Form, State}, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::{extract::Query, typed_header::TypedHeader}; +use hyper::StatusCode; +use mas_axum_utils::{ + InternalError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, Clock, oauth2::LoginHint}; +use mas_i18n::DataLocale; +use mas_matrix::HomeserverConnection; +use mas_router::{UpstreamOAuth2Authorize, UrlBuilder}; +use mas_storage::{ + BoxRepository, RepositoryAccess, + upstream_oauth2::UpstreamOAuthProviderRepository, + user::{BrowserSessionRepository, UserPasswordRepository, UserRepository}, +}; +use mas_templates::{ + AccountInactiveContext, FieldError, FormError, FormState, LoginContext, LoginFormField, + PostAuthContext, PostAuthContextInner, TemplateContext, Templates, ToFormState, +}; +use opentelemetry::{Key, KeyValue, metrics::Counter}; +use rand::Rng; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroizing; + +use super::shared::OptionalPostAuthAction; +use crate::{ + BoundActivityTracker, Limiter, METER, PreferredLanguage, RequesterFingerprint, SiteConfig, + passwords::{PasswordManager, PasswordVerificationResult}, + session::{SessionOrFallback, load_session_or_fallback}, +}; + +static PASSWORD_LOGIN_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("mas.user.password_login_attempt") + .with_description("Number of password login attempts") + .with_unit("{attempt}") + .build() +}); +const RESULT: Key = Key::from_static_str("result"); + +#[derive(Debug, Deserialize, Serialize)] +pub(crate) struct LoginForm { + username: String, + password: String, +} + +impl ToFormState for LoginForm { + type Field = LoginFormField; +} + +#[tracing::instrument(name = "handlers.views.login.get", skip_all)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + 
State(url_builder): State, + State(site_config): State, + State(homeserver): State>, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + Query(query): Query, + cookie_jar: CookieJar, +) -> Result { + let (cookie_jar, maybe_session) = match load_session_or_fallback( + cookie_jar, &clock, &mut rng, &templates, &locale, &mut repo, + ) + .await? + { + SessionOrFallback::MaybeSession { + cookie_jar, + maybe_session, + .. + } => (cookie_jar, maybe_session), + SessionOrFallback::Fallback { response } => return Ok(response), + }; + + if let Some(session) = maybe_session { + activity_tracker + .record_browser_session(&clock, &session) + .await; + + let reply = query.go_next(&url_builder); + return Ok((cookie_jar, reply).into_response()); + } + + let providers = repo.upstream_oauth_provider().all_enabled().await?; + + // If password-based login is disabled, and there is only one upstream provider, + // we can directly start an authorization flow + if !site_config.password_login_enabled && providers.len() == 1 { + let provider = providers.into_iter().next().unwrap(); + + let mut destination = UpstreamOAuth2Authorize::new(provider.id); + + if let Some(action) = query.post_auth_action { + destination = destination.and_then(action); + } + + return Ok((cookie_jar, url_builder.redirect(&destination)).into_response()); + } + + render( + locale, + cookie_jar, + FormState::default(), + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await +} + +#[tracing::instrument(name = "handlers.views.login.post", skip_all)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(password_manager): State, + State(site_config): State, + State(templates): State, + State(url_builder): State, + State(limiter): State, + State(homeserver): State>, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + requester: RequesterFingerprint, + Query(query): Query, + 
cookie_jar: CookieJar, + user_agent: Option>, + Form(form): Form>, +) -> Result { + let user_agent = user_agent.map(|ua| ua.as_str().to_owned()); + if !site_config.password_login_enabled { + // XXX: is it necessary to have better errors here? + return Ok(StatusCode::METHOD_NOT_ALLOWED.into_response()); + } + + let form = cookie_jar.verify_form(&clock, form)?; + + // Validate the form + let mut form_state = form.to_form_state(); + + if form.username.is_empty() { + form_state.add_error_on_field(LoginFormField::Username, FieldError::Required); + } + + if form.password.is_empty() { + form_state.add_error_on_field(LoginFormField::Password, FieldError::Required); + } + + if !form_state.is_valid() { + tracing::warn!("Invalid login form: {form_state:?}"); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + return render( + locale, + cookie_jar, + form_state, + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await; + } + + // Extract the localpart of the MXID, fallback to the bare username + let username = homeserver + .localpart(&form.username) + .unwrap_or(&form.username); + + // First, lookup the user + let Some(user) = get_user_by_email_or_by_username(&site_config, &mut repo, username).await? 
+ else { + tracing::warn!(username, "User not found"); + let form_state = form_state.with_error_on_form(FormError::InvalidCredentials); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + return render( + locale, + cookie_jar, + form_state, + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await; + }; + + // Check the rate limit + if let Err(e) = limiter.check_password(requester, &user) { + tracing::warn!(error = &e as &dyn std::error::Error, "ratelimit exceeded"); + let form_state = form_state.with_error_on_form(FormError::RateLimitExceeded); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + return render( + locale, + cookie_jar, + form_state, + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await; + } + + // And its password + let Some(user_password) = repo.user_password().active(&user).await? else { + // There is no password for this user, but we don't want to disclose that. Show + // a generic 'invalid credentials' error instead + tracing::warn!(username, "No password for user"); + let form_state = form_state.with_error_on_form(FormError::InvalidCredentials); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + return render( + locale, + cookie_jar, + form_state, + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await; + }; + + let password = Zeroizing::new(form.password); + + // Verify the password, and upgrade it on-the-fly if needed + let user_password = match password_manager + .verify_and_upgrade( + &mut rng, + user_password.version, + password, + user_password.hashed_password.clone(), + ) + .await + { + Ok(PasswordVerificationResult::Success(Some((version, new_password_hash)))) => { + // Save the upgraded password + repo.user_password() + .add( + &mut rng, + &clock, + &user, + version, + new_password_hash, + Some(&user_password), + ) + .await? 
+ } + Ok(PasswordVerificationResult::Success(None)) => user_password, + Ok(PasswordVerificationResult::Failure) => { + tracing::warn!(username, "Failed to verify/upgrade password for user"); + let form_state = form_state.with_error_on_form(FormError::InvalidCredentials); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "mismatch")]); + return render( + locale, + cookie_jar, + form_state, + query, + &mut repo, + &clock, + &mut rng, + &templates, + &homeserver, + &site_config, + ) + .await; + } + Err(err) => return Err(InternalError::from_anyhow(err)), + }; + + // Now that we have checked the user password, we now want to show an error if + // the user is locked or deactivated + if user.deactivated_at.is_some() { + tracing::warn!(username, "User is deactivated"); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = AccountInactiveContext::new(user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let content = templates.render_account_deactivated(&ctx)?; + return Ok((cookie_jar, Html(content)).into_response()); + } + + if user.locked_at.is_some() { + tracing::warn!(username, "User is locked"); + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "error")]); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = AccountInactiveContext::new(user) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let content = templates.render_account_locked(&ctx)?; + return Ok((cookie_jar, Html(content)).into_response()); + } + + // At this point, we should have a 'valid' user. 
In case we missed something, we + // want it to crash in tests/debug builds + debug_assert!(user.is_valid()); + + // Start a new session + let user_session = repo + .browser_session() + .add(&mut rng, &clock, &user, user_agent) + .await?; + + // And mark it as authenticated by the password + repo.browser_session() + .authenticate_with_password(&mut rng, &clock, &user_session, &user_password) + .await?; + + repo.save().await?; + + PASSWORD_LOGIN_COUNTER.add(1, &[KeyValue::new(RESULT, "success")]); + + activity_tracker + .record_browser_session(&clock, &user_session) + .await; + + let cookie_jar = cookie_jar.set_session(&user_session); + let reply = query.go_next(&url_builder); + Ok((cookie_jar, reply).into_response()) +} + +async fn get_user_by_email_or_by_username( + site_config: &SiteConfig, + repo: &mut R, + username_or_email: &str, +) -> Result, R::Error> { + if site_config.login_with_email_allowed && username_or_email.contains('@') { + let maybe_user_email = repo.user_email().find_by_email(username_or_email).await?; + + if let Some(user_email) = maybe_user_email { + let user = repo.user().lookup(user_email.user_id).await?; + + if user.is_some() { + return Ok(user); + } + } + } + + let user = repo.user().find_by_username(username_or_email).await?; + + Ok(user) +} + +fn handle_login_hint( + mut ctx: LoginContext, + next: &PostAuthContext, + homeserver: &dyn HomeserverConnection, + site_config: &SiteConfig, +) -> LoginContext { + let form_state = ctx.form_state_mut(); + + // Do not override username if coming from a failed login attempt + if form_state.has_value(LoginFormField::Username) { + return ctx; + } + + if let PostAuthContextInner::ContinueAuthorizationGrant { ref grant } = next.ctx { + let value = match grant.parse_login_hint(homeserver.homeserver()) { + LoginHint::MXID(mxid) => Some(mxid.localpart().to_owned()), + LoginHint::Email(email) if site_config.login_with_email_allowed => { + Some(email.to_string()) + } + _ => None, + }; + 
form_state.set_value(LoginFormField::Username, value); + } + + ctx +} + +async fn render( + locale: DataLocale, + cookie_jar: CookieJar, + form_state: FormState, + action: OptionalPostAuthAction, + repo: &mut impl RepositoryAccess, + clock: &impl Clock, + rng: impl Rng, + templates: &Templates, + homeserver: &dyn HomeserverConnection, + site_config: &SiteConfig, +) -> Result { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(clock, rng); + let providers = repo.upstream_oauth_provider().all_enabled().await?; + + let ctx = LoginContext::default() + .with_form_state(form_state) + .with_upstream_providers(providers); + + let next = action + .load_context(repo) + .await + .map_err(InternalError::from_anyhow)?; + let ctx = if let Some(next) = next { + let ctx = handle_login_hint(ctx, &next, homeserver, site_config); + ctx.with_post_action(next) + } else { + ctx + }; + let ctx = ctx.with_csrf(csrf_token.form_value()).with_language(locale); + + let content = templates.render_login(&ctx)?; + Ok((cookie_jar, Html(content)).into_response()) +} + +#[cfg(test)] +mod test { + use hyper::{ + Request, StatusCode, + header::{CONTENT_TYPE, LOCATION}, + }; + use mas_data_model::{ + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_router::Route; + use mas_storage::{ + RepositoryAccess, + upstream_oauth2::{UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository}, + }; + use mas_templates::escape_html; + use oauth2_types::scope::OPENID; + use sqlx::PgPool; + use zeroize::Zeroizing; + + use crate::{ + SiteConfig, + test_utils::{ + CookieHelper, RequestBuilderExt, ResponseExt, TestState, setup, test_site_config, + }, + }; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_disabled(pool: PgPool) { + setup(); + let state = TestState::from_pool_with_site_config( + pool, + SiteConfig { + password_login_enabled: false, + 
..test_site_config() + }, + ) + .await + .unwrap(); + + let mut rng = state.rng(); + + // Without password login and no upstream providers, we should get an error + // message + let response = state.request(Request::get("/login").empty()).await; + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!( + response.body().contains("No login methods available"), + "Response body: {}", + response.body() + ); + + // Adding an upstream provider should redirect to it + let mut repo = state.repository().await.unwrap(); + let first_provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://first.com/".to_owned()), + human_name: Some("First Ltd.".to_owned()), + brand_name: None, + scope: [OPENID].into_iter().collect(), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + repo.save().await.unwrap(); + + let first_provider_login = mas_router::UpstreamOAuth2Authorize::new(first_provider.id); + + let response = state.request(Request::get("/login").empty()).await; + response.assert_status(StatusCode::SEE_OTHER); + 
response.assert_header_value(LOCATION, &first_provider_login.path_and_query()); + + // Adding a second provider should show a login page with both providers + let mut repo = state.repository().await.unwrap(); + let second_provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &state.clock, + UpstreamOAuthProviderParams { + issuer: Some("https://second.com/".to_owned()), + human_name: None, + brand_name: None, + scope: [OPENID].into_iter().collect(), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 1, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + repo.save().await.unwrap(); + + let second_provider_login = mas_router::UpstreamOAuth2Authorize::new(second_provider.id); + + let response = state.request(Request::get("/login").empty()).await; + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(response.body().contains(&escape_html("First Ltd."))); + assert!( + response + .body() + .contains(&escape_html(&first_provider_login.path_and_query())) + ); + assert!(response.body().contains(&escape_html("second.com"))); + assert!( + response + .body() + .contains(&escape_html(&second_provider_login.path_and_query())) + ); + } + + async 
fn user_with_password( + state: &TestState, + username: &str, + password: &str, + ) -> mas_data_model::User { + let mut rng = state.rng(); + let mut repo = state.repository().await.unwrap(); + let user = repo + .user() + .add(&mut rng, &state.clock, username.to_owned()) + .await + .unwrap(); + let (version, hash) = state + .password_manager + .hash(&mut rng, Zeroizing::new(password.to_owned())) + .await + .unwrap(); + repo.user_password() + .add(&mut rng, &state.clock, &user, version, hash, None) + .await + .unwrap(); + repo.save().await.unwrap(); + user + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Provision a user with a password + user_with_password(&state, "john", "hunter2").await; + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + + // Now if we get to the home page, we should see the user's username + let request = Request::get("/").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + 
response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(response.body().contains("john")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login_with_mxid(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Provision a user with a password + user_with_password(&state, "john", "hunter2").await; + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "@john:example.com", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + + // Now if we get to the home page, we should see the user's username + let request = Request::get("/").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(response.body().contains("john")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login_with_mxid_wrong_server(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = 
CookieHelper::new(); + + // Provision a user with a password + user_with_password(&state, "john", "hunter2").await; + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "@john:something.corp", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + + // This shouldn't have worked, we're back on the login page + response.assert_status(StatusCode::OK); + assert!(response.body().contains("Invalid credentials")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login_rate_limit(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + // Provision a user without a password. + // We don't give that user a password, so that we skip hashing it in this test. 
+ // It will still be rate-limited + let mut repo = state.repository().await.unwrap(); + repo.user() + .add(&mut rng, &state.clock, "john".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + + // First three attempts should just tell about the invalid credentials + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::OK); + let body = response.body(); + assert!(body.contains("Invalid credentials")); + assert!(!body.contains("too many requests")); + + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::OK); + let body = response.body(); + assert!(body.contains("Invalid credentials")); + assert!(!body.contains("too many requests")); + + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::OK); + let body = response.body(); + assert!(body.contains("Invalid credentials")); + assert!(!body.contains("too many requests")); + + // The fourth attempt should be rate-limited + let response = state.request(request.clone()).await; + response.assert_status(StatusCode::OK); + let body = response.body(); + assert!(!body.contains("Invalid credentials")); + assert!(body.contains("too many requests")); + } + + #[sqlx::test(migrator 
= "mas_storage_pg::MIGRATOR")] + async fn test_password_login_locked_account(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Provision a user with a password + let user = user_with_password(&state, "john", "hunter2").await; + + // Lock the user + let mut repo = state.repository().await.unwrap(); + repo.user().lock(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(response.body().contains("Account locked")); + + // A bad password should not disclose that the account is locked + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "badpassword", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(!response.body().contains("Account locked")); + 
assert!(response.body().contains("Invalid credentials")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_login_deactivated_account(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Provision a user with a password + let user = user_with_password(&state, "john", "hunter2").await; + + // Deactivate the user + let mut repo = state.repository().await.unwrap(); + repo.user().deactivate(&state.clock, user).await.unwrap(); + repo.save().await.unwrap(); + + // Render the login page to get a CSRF token + let request = Request::get("/login").empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the login form + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "hunter2", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + assert!(response.body().contains("Account deleted")); + + // A bad password should not disclose that the account is deleted + let request = Request::post("/login").form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "password": "badpassword", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, 
"text/html; charset=utf-8"); + assert!(!response.body().contains("Account deleted")); + assert!(response.body().contains("Invalid credentials")); + } +} diff --git a/matrix-authentication-service/crates/handlers/src/views/logout.rs b/matrix-authentication-service/crates/handlers/src/views/logout.rs new file mode 100644 index 00000000..8acb9d6d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/logout.rs @@ -0,0 +1,61 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + extract::{Form, State}, + response::IntoResponse, +}; +use mas_axum_utils::{ + InternalError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::BoxClock; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::{BoxRepository, user::BrowserSessionRepository}; + +use crate::BoundActivityTracker; + +#[tracing::instrument(name = "handlers.views.logout.post", skip_all)] +pub(crate) async fn post( + clock: BoxClock, + mut repo: BoxRepository, + cookie_jar: CookieJar, + State(url_builder): State, + activity_tracker: BoundActivityTracker, + Form(form): Form>>, +) -> Result { + let form = cookie_jar.verify_form(&clock, form)?; + + let (session_info, cookie_jar) = cookie_jar.session_info(); + + if let Some(session_id) = session_info.current_session_id() { + let maybe_session = repo.browser_session().lookup(session_id).await?; + if let Some(session) = maybe_session + && session.finished_at.is_none() + { + activity_tracker + .record_browser_session(&clock, &session) + .await; + + repo.browser_session().finish(&clock, session).await?; + } + } + + repo.save().await?; + + // We always want to clear out the session cookie, even if the session was + // invalid + let cookie_jar = 
cookie_jar.update_session_info(&session_info.mark_session_ended()); + + let destination = if let Some(action) = form { + action.go_next(&url_builder) + } else { + url_builder.redirect(&mas_router::Login::default()) + }; + + Ok((cookie_jar, destination)) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/mod.rs b/matrix-authentication-service/crates/handlers/src/views/mod.rs new file mode 100644 index 00000000..bc070cda --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/mod.rs @@ -0,0 +1,13 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub mod app; +pub mod index; +pub mod login; +pub mod logout; +pub mod recovery; +pub mod register; +pub mod shared; diff --git a/matrix-authentication-service/crates/handlers/src/views/recovery/mod.rs b/matrix-authentication-service/crates/handlers/src/views/recovery/mod.rs new file mode 100644 index 00000000..630e9905 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/recovery/mod.rs @@ -0,0 +1,8 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub mod progress; +pub mod start; diff --git a/matrix-authentication-service/crates/handlers/src/views/recovery/progress.rs b/matrix-authentication-service/crates/handlers/src/views/recovery/progress.rs new file mode 100644 index 00000000..06e71f17 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/recovery/progress.rs @@ -0,0 +1,156 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use axum::{ + Form, + extract::{Path, State}, + response::{Html, IntoResponse, Response}, +}; +use hyper::StatusCode; +use mas_axum_utils::{ + InternalError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, SiteConfig}; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, + queue::{QueueJobRepositoryExt as _, SendAccountRecoveryEmailsJob}, +}; +use mas_templates::{EmptyContext, RecoveryProgressContext, TemplateContext, Templates}; +use ulid::Ulid; + +use crate::{Limiter, PreferredLanguage, RequesterFingerprint}; + +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + State(site_config): State, + State(templates): State, + State(url_builder): State, + PreferredLanguage(locale): PreferredLanguage, + cookie_jar: CookieJar, + Path(id): Path, +) -> Result { + if !site_config.account_recovery_allowed { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_disabled(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + let (session_info, cookie_jar) = cookie_jar.session_info(); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + if maybe_session.is_some() { + // TODO: redirect to continue whatever action was going on + return Ok((cookie_jar, url_builder.redirect(&mas_router::Index)).into_response()); + } + + let Some(recovery_session) = repo.user_recovery().lookup_session(id).await? else { + // XXX: is that the right thing to do? 
+ return Ok(( + cookie_jar, + url_builder.redirect(&mas_router::AccountRecoveryStart), + ) + .into_response()); + }; + + if recovery_session.consumed_at.is_some() { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_consumed(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + let context = RecoveryProgressContext::new(recovery_session, false) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + repo.save().await?; + + let rendered = templates.render_recovery_progress(&context)?; + + Ok((cookie_jar, Html(rendered)).into_response()) +} + +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + State(site_config): State, + State(templates): State, + State(url_builder): State, + (State(limiter), requester): (State, RequesterFingerprint), + PreferredLanguage(locale): PreferredLanguage, + cookie_jar: CookieJar, + Path(id): Path, + Form(form): Form>, +) -> Result { + if !site_config.account_recovery_allowed { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_disabled(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + let (session_info, cookie_jar) = cookie_jar.session_info(); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + if maybe_session.is_some() { + // TODO: redirect to continue whatever action was going on + return Ok((cookie_jar, url_builder.redirect(&mas_router::Index)).into_response()); + } + + let Some(recovery_session) = repo.user_recovery().lookup_session(id).await? else { + // XXX: is that the right thing to do? 
+ return Ok(( + cookie_jar, + url_builder.redirect(&mas_router::AccountRecoveryStart), + ) + .into_response()); + }; + + if recovery_session.consumed_at.is_some() { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_consumed(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + // Verify the CSRF token + let () = cookie_jar.verify_form(&clock, form)?; + + // Check the rate limit if we are about to process the form + if let Err(e) = limiter.check_account_recovery(requester, &recovery_session.email) { + tracing::warn!(error = &e as &dyn std::error::Error); + let context = RecoveryProgressContext::new(recovery_session, true) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + let rendered = templates.render_recovery_progress(&context)?; + + return Ok((StatusCode::TOO_MANY_REQUESTS, (cookie_jar, Html(rendered))).into_response()); + } + + // Schedule a new batch of emails + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendAccountRecoveryEmailsJob::new(&recovery_session), + ) + .await?; + + repo.save().await?; + + let context = RecoveryProgressContext::new(recovery_session, false) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let rendered = templates.render_recovery_progress(&context)?; + + Ok((cookie_jar, Html(rendered)).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/recovery/start.rs b/matrix-authentication-service/crates/handlers/src/views/recovery/start.rs new file mode 100644 index 00000000..f877a04d --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/recovery/start.rs @@ -0,0 +1,163 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::str::FromStr; + +use axum::{ + Form, + extract::State, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::typed_header::TypedHeader; +use lettre::Address; +use mas_axum_utils::{ + InternalError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, SiteConfig}; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, + queue::{QueueJobRepositoryExt as _, SendAccountRecoveryEmailsJob}, +}; +use mas_templates::{ + EmptyContext, FieldError, FormError, FormState, RecoveryStartContext, RecoveryStartFormField, + TemplateContext, Templates, +}; +use serde::{Deserialize, Serialize}; + +use crate::{BoundActivityTracker, Limiter, PreferredLanguage, RequesterFingerprint}; + +#[derive(Deserialize, Serialize)] +pub(crate) struct StartRecoveryForm { + email: String, +} + +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + State(site_config): State, + State(templates): State, + State(url_builder): State, + PreferredLanguage(locale): PreferredLanguage, + cookie_jar: CookieJar, +) -> Result { + if !site_config.account_recovery_allowed { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_disabled(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + let (session_info, cookie_jar) = cookie_jar.session_info(); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + if maybe_session.is_some() { + // TODO: redirect to continue whatever action was going on + return Ok((cookie_jar, url_builder.redirect(&mas_router::Index)).into_response()); + } + + let context = RecoveryStartContext::new() + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + repo.save().await?; + + let rendered = templates.render_recovery_start(&context)?; + + Ok((cookie_jar, Html(rendered)).into_response()) 
+} + +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + mut repo: BoxRepository, + user_agent: TypedHeader, + activity_tracker: BoundActivityTracker, + State(site_config): State, + State(templates): State, + State(url_builder): State, + (State(limiter), requester): (State, RequesterFingerprint), + PreferredLanguage(locale): PreferredLanguage, + cookie_jar: CookieJar, + Form(form): Form>, +) -> Result { + if !site_config.account_recovery_allowed { + let context = EmptyContext.with_language(locale); + let rendered = templates.render_recovery_disabled(&context)?; + return Ok((cookie_jar, Html(rendered)).into_response()); + } + + let (session_info, cookie_jar) = cookie_jar.session_info(); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + if maybe_session.is_some() { + // TODO: redirect to continue whatever action was going on + return Ok((cookie_jar, url_builder.redirect(&mas_router::Index)).into_response()); + } + + let user_agent = user_agent.as_str().to_owned(); + let ip_address = activity_tracker.ip(); + + let form = cookie_jar.verify_form(&clock, form)?; + let mut form_state = FormState::from_form(&form); + + if Address::from_str(&form.email).is_err() { + form_state = + form_state.with_error_on_field(RecoveryStartFormField::Email, FieldError::Invalid); + } + + if form_state.is_valid() { + // Check the rate limit if we are about to process the form + if let Err(e) = limiter.check_account_recovery(requester, &form.email) { + tracing::warn!(error = &e as &dyn std::error::Error); + form_state.add_error_on_form(FormError::RateLimitExceeded); + } + } + + if !form_state.is_valid() { + repo.save().await?; + let context = RecoveryStartContext::new() + .with_form_state(form_state) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let rendered = templates.render_recovery_start(&context)?; + + return Ok((cookie_jar, 
Html(rendered)).into_response()); + } + + let session = repo + .user_recovery() + .add_session( + &mut rng, + &clock, + form.email, + user_agent, + ip_address, + locale.to_string(), + ) + .await?; + + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendAccountRecoveryEmailsJob::new(&session), + ) + .await?; + + repo.save().await?; + + Ok(( + cookie_jar, + url_builder.redirect(&mas_router::AccountRecoveryProgress::new(session.id)), + ) + .into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/register/cookie.rs b/matrix-authentication-service/crates/handlers/src/views/register/cookie.rs new file mode 100644 index 00000000..ab2d7522 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/register/cookie.rs @@ -0,0 +1,102 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// TODO: move that to a standalone cookie manager + +use std::collections::BTreeSet; + +use chrono::{DateTime, Duration, Utc}; +use mas_axum_utils::cookies::CookieJar; +use mas_data_model::{Clock, UserRegistration}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +/// Name of the cookie +static COOKIE_NAME: &str = "user-registration-sessions"; + +/// Sessions expire after an hour +static SESSION_MAX_TIME: Duration = Duration::hours(1); + +/// The content of the cookie, which stores a list of user registration IDs +#[derive(Serialize, Deserialize, Default, Debug)] +pub struct UserRegistrationSessions(BTreeSet); + +#[derive(Debug, Error, PartialEq, Eq)] +#[error("user registration session not found")] +pub struct UserRegistrationSessionNotFound; + +impl UserRegistrationSessions { + /// Load the user registration sessions cookie + pub fn load(cookie_jar: &CookieJar) -> Self { + match cookie_jar.load(COOKIE_NAME) { + Ok(Some(sessions)) => sessions, + Ok(None) => 
Self::default(), + Err(e) => { + tracing::warn!( + error = &e as &dyn std::error::Error, + "Invalid upstream sessions cookie" + ); + Self::default() + } + } + } + + /// Returns true if the cookie is empty + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Save the user registration sessions to the cookie jar + pub fn save(self, cookie_jar: CookieJar, clock: &C) -> CookieJar + where + C: Clock, + { + let this = self.expire(clock.now()); + + if this.is_empty() { + cookie_jar.remove(COOKIE_NAME) + } else { + cookie_jar.save(COOKIE_NAME, &this, false) + } + } + + fn expire(mut self, now: DateTime) -> Self { + self.0.retain(|id| { + let Ok(ts) = id.timestamp_ms().try_into() else { + return false; + }; + let Some(when) = DateTime::from_timestamp_millis(ts) else { + return false; + }; + now - when < SESSION_MAX_TIME + }); + + self + } + + /// Add a new session, for a provider and a random state + pub fn add(mut self, user_registration: &UserRegistration) -> Self { + self.0.insert(user_registration.id); + self + } + + /// Check if the session is in the list + pub fn contains(&self, user_registration: &UserRegistration) -> bool { + self.0.contains(&user_registration.id) + } + + /// Mark a link as consumed to avoid replay + pub fn consume_session( + mut self, + user_registration: &UserRegistration, + ) -> Result { + if !self.0.remove(&user_registration.id) { + return Err(UserRegistrationSessionNotFound); + } + + Ok(self) + } +} diff --git a/matrix-authentication-service/crates/handlers/src/views/register/mod.rs b/matrix-authentication-service/crates/handlers/src/views/register/mod.rs new file mode 100644 index 00000000..6a51852a --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/register/mod.rs @@ -0,0 +1,95 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use axum::{ + extract::State, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::extract::Query; +use mas_axum_utils::{InternalError, SessionInfoExt, cookies::CookieJar, csrf::CsrfExt as _}; +use mas_data_model::{BoxClock, BoxRng, SiteConfig}; +use mas_router::{PasswordRegister, UpstreamOAuth2Authorize, UrlBuilder}; +use mas_storage::BoxRepository; +use mas_templates::{RegisterContext, TemplateContext, Templates}; + +use super::shared::OptionalPostAuthAction; +use crate::{BoundActivityTracker, PreferredLanguage}; + +mod cookie; +pub(crate) mod password; +pub(crate) mod steps; + +pub use self::cookie::UserRegistrationSessions as UserRegistrationSessionsCookie; + +#[tracing::instrument(name = "handlers.views.register.get", skip_all)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(site_config): State, + mut repo: BoxRepository, + activity_tracker: BoundActivityTracker, + Query(query): Query, + cookie_jar: CookieJar, +) -> Result { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let (session_info, cookie_jar) = cookie_jar.session_info(); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + + if let Some(session) = maybe_session { + activity_tracker + .record_browser_session(&clock, &session) + .await; + + let reply = query.go_next(&url_builder); + return Ok((cookie_jar, reply).into_response()); + } + + let providers = repo.upstream_oauth_provider().all_enabled().await?; + + // If password-based login is disabled, and there is only one upstream provider, + // we can directly start an authorization flow + if !site_config.password_registration_enabled && providers.len() == 1 { + let provider = providers.into_iter().next().unwrap(); + + let mut destination = UpstreamOAuth2Authorize::new(provider.id); + + if let Some(action) = query.post_auth_action { + destination = 
destination.and_then(action); + } + + return Ok((cookie_jar, url_builder.redirect(&destination)).into_response()); + } + + // If password-based registration is enabled and there are no upstream + // providers, we redirect to the password registration page + if site_config.password_registration_enabled && providers.is_empty() { + let mut destination = PasswordRegister::default(); + + if let Some(action) = query.post_auth_action { + destination = destination.and_then(action); + } + + return Ok((cookie_jar, url_builder.redirect(&destination)).into_response()); + } + + let mut ctx = RegisterContext::new(providers); + let post_action = query + .load_context(&mut repo) + .await + .map_err(InternalError::from_anyhow)?; + if let Some(action) = post_action { + ctx = ctx.with_post_action(action); + } + + let ctx = ctx.with_csrf(csrf_token.form_value()).with_language(locale); + + let content = templates.render_register(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} diff --git a/matrix-authentication-service/crates/handlers/src/views/register/password.rs b/matrix-authentication-service/crates/handlers/src/views/register/password.rs new file mode 100644 index 00000000..65ba5fe0 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/register/password.rs @@ -0,0 +1,1046 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{str::FromStr, sync::Arc}; + +use axum::{ + extract::{Form, State}, + response::{Html, IntoResponse, Response}, +}; +use axum_extra::{extract::Query, typed_header::TypedHeader}; +use hyper::StatusCode; +use lettre::Address; +use mas_axum_utils::{ + InternalError, SessionInfoExt, + cookies::CookieJar, + csrf::{CsrfExt, CsrfToken, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng, CaptchaConfig}; +use mas_i18n::DataLocale; +use mas_matrix::HomeserverConnection; +use mas_policy::Policy; +use mas_router::UrlBuilder; +use mas_storage::{ + BoxRepository, RepositoryAccess, + queue::{QueueJobRepositoryExt as _, SendEmailAuthenticationCodeJob}, + user::{UserEmailRepository, UserRepository}, +}; +use mas_templates::{ + FieldError, FormError, FormState, PasswordRegisterContext, RegisterFormField, TemplateContext, + Templates, ToFormState, +}; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroizing; + +use super::cookie::UserRegistrationSessions; +use crate::{ + BoundActivityTracker, Limiter, PreferredLanguage, RequesterFingerprint, SiteConfig, + captcha::Form as CaptchaForm, passwords::PasswordManager, + views::shared::OptionalPostAuthAction, +}; + +#[derive(Debug, Deserialize, Serialize)] +pub(crate) struct RegisterForm { + username: String, + #[serde(default)] + email: String, + password: String, + password_confirm: String, + #[serde(default)] + accept_terms: String, + + #[serde(flatten, skip_serializing)] + captcha: CaptchaForm, +} + +impl ToFormState for RegisterForm { + type Field = RegisterFormField; +} + +#[derive(Deserialize)] +pub struct QueryParams { + username: Option, + #[serde(flatten)] + action: OptionalPostAuthAction, +} + +#[tracing::instrument(name = "handlers.views.password_register.get", skip_all)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + State(site_config): State, + mut repo: BoxRepository, + 
Query(query): Query, + cookie_jar: CookieJar, +) -> Result { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let (session_info, cookie_jar) = cookie_jar.session_info(); + + let maybe_session = session_info.load_active_session(&mut repo).await?; + + if maybe_session.is_some() { + let reply = query.action.go_next(&url_builder); + return Ok((cookie_jar, reply).into_response()); + } + + if !site_config.password_registration_enabled { + // If password-based registration is disabled, redirect to the login page here + return Ok(url_builder + .redirect(&mas_router::Login::from(query.action.post_auth_action)) + .into_response()); + } + + let mut ctx = PasswordRegisterContext::default(); + + // If we got a username from the query string, use it to prefill the form + if let Some(username) = query.username { + let mut form_state = FormState::default(); + form_state.set_value(RegisterFormField::Username, Some(username)); + ctx = ctx.with_form_state(form_state); + } + + let content = render( + locale, + ctx, + query.action, + csrf_token, + &mut repo, + &templates, + site_config.captcha.clone(), + ) + .await?; + + Ok((cookie_jar, Html(content)).into_response()) +} + +#[tracing::instrument(name = "handlers.views.password_register.post", skip_all)] +#[allow(clippy::too_many_arguments)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(password_manager): State, + State(templates): State, + State(url_builder): State, + State(site_config): State, + State(homeserver): State>, + State(http_client): State, + (State(limiter), requester): (State, RequesterFingerprint), + mut policy: Policy, + mut repo: BoxRepository, + (user_agent, activity_tracker): ( + Option>, + BoundActivityTracker, + ), + Query(query): Query, + cookie_jar: CookieJar, + Form(form): Form>, +) -> Result { + let user_agent = user_agent.map(|ua| ua.as_str().to_owned()); + + let ip_address = activity_tracker.ip(); + if 
!site_config.password_registration_enabled { + return Ok(StatusCode::METHOD_NOT_ALLOWED.into_response()); + } + + let form = cookie_jar.verify_form(&clock, form)?; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + // Validate the captcha + // TODO: display a nice error message to the user + let passed_captcha = form + .captcha + .verify( + &activity_tracker, + &http_client, + url_builder.public_hostname(), + site_config.captcha.as_ref(), + ) + .await + .is_ok(); + + let state = form.to_form_state(); + + // The email form is only shown if the server requires it + let email = site_config + .password_registration_email_required + .then_some(form.email); + + // Validate the form + let state = { + let mut state = state; + + if !passed_captcha { + state.add_error_on_form(FormError::Captcha); + } + + let mut homeserver_denied_username = false; + if form.username.is_empty() { + state.add_error_on_field(RegisterFormField::Username, FieldError::Required); + } else if repo.user().exists(&form.username).await? { + // The user already exists in the database + state.add_error_on_field(RegisterFormField::Username, FieldError::Exists); + } else if !homeserver + .is_localpart_available(&form.username) + .await + .map_err(InternalError::from_anyhow)? + { + // The user already exists on the homeserver + tracing::warn!( + username = &form.username, + "Homeserver denied username provided by user" + ); + + // We defer adding the error on the field, until we know whether we had another + // error from the policy, to avoid showing both + homeserver_denied_username = true; + } + + if let Some(email) = &email { + // Note that we don't check here if the email is already taken here, as + // we don't want to leak the information about other users. Instead, we will + // show an error message once the user confirmed their email address. 
+ if email.is_empty() { + state.add_error_on_field(RegisterFormField::Email, FieldError::Required); + } else if Address::from_str(email).is_err() { + state.add_error_on_field(RegisterFormField::Email, FieldError::Invalid); + } + } + + if form.password.is_empty() { + state.add_error_on_field(RegisterFormField::Password, FieldError::Required); + } + + if form.password_confirm.is_empty() { + state.add_error_on_field(RegisterFormField::PasswordConfirm, FieldError::Required); + } + + if form.password != form.password_confirm { + state.add_error_on_field(RegisterFormField::Password, FieldError::Unspecified); + state.add_error_on_field( + RegisterFormField::PasswordConfirm, + FieldError::PasswordMismatch, + ); + } + + if !password_manager.is_password_complex_enough(&form.password)? { + // TODO localise this error + state.add_error_on_field( + RegisterFormField::Password, + FieldError::Policy { + code: None, + message: "Password is too weak".to_owned(), + }, + ); + } + + // If the site has terms of service, the user must accept them + if site_config.tos_uri.is_some() && form.accept_terms != "on" { + state.add_error_on_field(RegisterFormField::AcceptTerms, FieldError::Required); + } + + let res = policy + .evaluate_register(mas_policy::RegisterInput { + registration_method: mas_policy::RegistrationMethod::Password, + username: &form.username, + email: email.as_deref(), + requester: mas_policy::Requester { + ip_address: activity_tracker.ip(), + user_agent: user_agent.clone(), + }, + }) + .await?; + + for violation in res.violations { + match violation.field.as_deref() { + Some("email") => state.add_error_on_field( + RegisterFormField::Email, + FieldError::Policy { + code: violation.code.map(|c| c.as_str()), + message: violation.msg, + }, + ), + Some("username") => { + // If the homeserver denied the username, but we also had an error on the policy + // side, we don't want to show both, so we reset the state here + homeserver_denied_username = false; + 
state.add_error_on_field( + RegisterFormField::Username, + FieldError::Policy { + code: violation.code.map(|c| c.as_str()), + message: violation.msg, + }, + ); + } + Some("password") => state.add_error_on_field( + RegisterFormField::Password, + FieldError::Policy { + code: violation.code.map(|c| c.as_str()), + message: violation.msg, + }, + ), + _ => state.add_error_on_form(FormError::Policy { + code: violation.code.map(|c| c.as_str()), + message: violation.msg, + }), + } + } + + if homeserver_denied_username { + // XXX: we may want to return different errors like "this username is reserved" + state.add_error_on_field(RegisterFormField::Username, FieldError::Exists); + } + + if state.is_valid() { + // Check the rate limit if we are about to process the form + if let Err(e) = limiter.check_registration(requester) { + tracing::warn!(error = &e as &dyn std::error::Error); + state.add_error_on_form(FormError::RateLimitExceeded); + } + + if let Some(email) = &email + && let Err(e) = limiter.check_email_authentication_email(requester, email) + { + tracing::warn!(error = &e as &dyn std::error::Error); + state.add_error_on_form(FormError::RateLimitExceeded); + } + } + + state + }; + + if !state.is_valid() { + let content = render( + locale, + PasswordRegisterContext::default().with_form_state(state), + query, + csrf_token, + &mut repo, + &templates, + site_config.captcha.clone(), + ) + .await?; + + return Ok((cookie_jar, Html(content)).into_response()); + } + + let post_auth_action = query + .post_auth_action + .map(serde_json::to_value) + .transpose()?; + let registration = repo + .user_registration() + .add( + &mut rng, + &clock, + form.username, + ip_address, + user_agent, + post_auth_action, + ) + .await?; + + let registration = if let Some(tos_uri) = &site_config.tos_uri { + repo.user_registration() + .set_terms_url(registration, tos_uri.clone()) + .await? 
+ } else { + registration + }; + + let registration = if let Some(email) = email { + // Create a new user email authentication session + let user_email_authentication = repo + .user_email() + .add_authentication_for_registration(&mut rng, &clock, email, ®istration) + .await?; + + // Schedule a job to verify the email + repo.queue_job() + .schedule_job( + &mut rng, + &clock, + SendEmailAuthenticationCodeJob::new(&user_email_authentication, locale.to_string()), + ) + .await?; + + repo.user_registration() + .set_email_authentication(registration, &user_email_authentication) + .await? + } else { + registration + }; + + // Hash the password + let password = Zeroizing::new(form.password); + let (version, hashed_password) = password_manager + .hash(&mut rng, password) + .await + .map_err(InternalError::from_anyhow)?; + + // Add the password to the registration + let registration = repo + .user_registration() + .set_password(registration, hashed_password, version) + .await?; + + repo.save().await?; + + let cookie_jar = UserRegistrationSessions::load(&cookie_jar) + .add(®istration) + .save(cookie_jar, &clock); + + Ok(( + cookie_jar, + url_builder.redirect(&mas_router::RegisterFinish::new(registration.id)), + ) + .into_response()) +} + +async fn render( + locale: DataLocale, + ctx: PasswordRegisterContext, + action: OptionalPostAuthAction, + csrf_token: CsrfToken, + repo: &mut impl RepositoryAccess, + templates: &Templates, + captcha_config: Option, +) -> Result { + let next = action + .load_context(repo) + .await + .map_err(InternalError::from_anyhow)?; + let ctx = if let Some(next) = next { + ctx.with_post_action(next) + } else { + ctx + }; + let ctx = ctx + .with_captcha(captcha_config) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_password_register(&ctx)?; + Ok(content) +} + +#[cfg(test)] +mod tests { + use hyper::{ + Request, StatusCode, + header::{CONTENT_TYPE, LOCATION}, + }; + use mas_router::Route; + use 
sqlx::PgPool; + + use crate::{ + SiteConfig, + test_utils::{ + CookieHelper, RequestBuilderExt, ResponseExt, TestState, setup, test_site_config, + }, + }; + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_password_disabled(pool: PgPool) { + setup(); + let state = TestState::from_pool_with_site_config( + pool, + SiteConfig { + password_login_enabled: false, + password_registration_enabled: false, + ..test_site_config() + }, + ) + .await + .unwrap(); + + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let response = state.request(request).await; + response.assert_status(StatusCode::SEE_OTHER); + response.assert_header_value(LOCATION, "/login"); + + let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": "abc", + "username": "john", + "email": "john@example.com", + "password": "hunter2", + "password_confirm": "hunter2", + })); + let response = state.request(request).await; + response.assert_status(StatusCode::METHOD_NOT_ALLOWED); + } + + /// Test the registration happy path + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the registration form + let request = 
Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "email": "john@example.com", + "password": "correcthorsebatterystaple", + "password_confirm": "correcthorsebatterystaple", + "accept_terms": "on", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::SEE_OTHER); + let location = response.headers().get(LOCATION).unwrap(); + + // The handler redirects with the ID as the second to last portion of the path + let id = location + .to_str() + .unwrap() + .rsplit('/') + .nth(1) + .unwrap() + .parse() + .unwrap(); + + // There should be a new registration in the database + let mut repo = state.repository().await.unwrap(); + let registration = repo.user_registration().lookup(id).await.unwrap().unwrap(); + assert_eq!(registration.username, "john".to_owned()); + assert!(registration.password.is_some()); + + let email_authentication = repo + .user_email() + .lookup_authentication(registration.email_authentication_id.unwrap()) + .await + .unwrap() + .unwrap(); + assert_eq!(email_authentication.email, "john@example.com"); + } + + /// When the two password fields mismatch, it should give an error + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_password_mismatch(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let 
csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the registration form + let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "email": "john@example.com", + "password": "hunter2", + "password_confirm": "mismatch", + "accept_terms": "on", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + assert!(response.body().contains("Password fields don't match")); + } + + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_username_too_long(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the registration form + let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": csrf_token, + "username": "a".repeat(256), + "email": "john@example.com", + "password": "hunter2", + "password_confirm": "hunter2", + "accept_terms": "on", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + 
assert!( + response.body().contains("Username is too long"), + "response body: {}", + response.body() + ); + } + + /// When the user already exists in the database, it should give an error + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_user_exists(pool: PgPool) { + setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let mut rng = state.rng(); + let cookies = CookieHelper::new(); + + // Insert a user in the database first + let mut repo = state.repository().await.unwrap(); + repo.user() + .add(&mut rng, &state.clock, "john".to_owned()) + .await + .unwrap(); + repo.save().await.unwrap(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Submit the registration form + let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "email": "john@example.com", + "password": "hunter2", + "password_confirm": "hunter2", + "accept_terms": "on", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + assert!(response.body().contains("This username is already taken")); + } + + /// When the username is already reserved on the homeserver, it should give + /// an error + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_user_reserved(pool: PgPool) { 
+ setup(); + let state = TestState::from_pool(pool).await.unwrap(); + let cookies = CookieHelper::new(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8"); + // Extract the CSRF token from the response body + let csrf_token = response + .body() + .split("name=\"csrf\" value=\"") + .nth(1) + .unwrap() + .split('\"') + .next() + .unwrap(); + + // Reserve "john" on the homeserver + state.homeserver_connection.reserve_localpart("john").await; + + // Submit the registration form + let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query()) + .form(serde_json::json!({ + "csrf": csrf_token, + "username": "john", + "email": "john@example.com", + "password": "hunter2", + "password_confirm": "hunter2", + "accept_terms": "on", + })); + let request = cookies.with_cookies(request); + let response = state.request(request).await; + cookies.save_cookies(&response); + response.assert_status(StatusCode::OK); + assert!(response.body().contains("This username is already taken")); + } + + /// Test registration without email when email is not required + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_register_without_email_when_not_required(pool: PgPool) { + setup(); + let state = TestState::from_pool_with_site_config( + pool, + SiteConfig { + password_registration_email_required: false, + ..test_site_config() + }, + ) + .await + .unwrap(); + let cookies = CookieHelper::new(); + + // Render the registration page and get the CSRF token + let request = + Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty(); + let request = cookies.with_cookies(request); + let 
        // Submit the form; the handler should redirect to the next step.
        response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");
        // Extract the CSRF token from the response body
        let csrf_token = response
            .body()
            .split("name=\"csrf\" value=\"")
            .nth(1)
            .unwrap()
            .split('\"')
            .next()
            .unwrap();

        // Submit the registration form without email
        let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query())
            .form(serde_json::json!({
                "csrf": csrf_token,
                "username": "alice",
                "password": "correcthorsebatterystaple",
                "password_confirm": "correcthorsebatterystaple",
                "accept_terms": "on",
            }));
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::SEE_OTHER);
        let location = response.headers().get(LOCATION).unwrap();

        // The handler redirects with the ID as the second to last portion of the path
        let id = location
            .to_str()
            .unwrap()
            .rsplit('/')
            .nth(1)
            .unwrap()
            .parse()
            .unwrap();

        // There should be a new registration in the database
        let mut repo = state.repository().await.unwrap();
        let registration = repo.user_registration().lookup(id).await.unwrap().unwrap();
        assert_eq!(registration.username, "alice".to_owned());
        assert!(registration.password.is_some());
        // Email authentication should be None when email is not required and not
        // provided
        assert!(registration.email_authentication_id.is_none());
    }

    /// Test registration with valid email when email is not required
    /// (email input is ignored completely when not required)
    #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
    async fn test_register_with_email_when_not_required(pool: PgPool) {
        setup();
        let state = TestState::from_pool_with_site_config(
            pool,
            SiteConfig {
                password_registration_email_required: false,
                ..test_site_config()
            },
        )
        .await
        .unwrap();
        let cookies = CookieHelper::new();

        // Render the registration page and get the CSRF token
        let request =
            Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty();
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");
        // Extract the CSRF token from the response body
        let csrf_token = response
            .body()
            .split("name=\"csrf\" value=\"")
            .nth(1)
            .unwrap()
            .split('\"')
            .next()
            .unwrap();

        // Submit the registration form with valid email
        let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query())
            .form(serde_json::json!({
                "csrf": csrf_token,
                "username": "charlie",
                "email": "charlie@example.com",
                "password": "correcthorsebatterystaple",
                "password_confirm": "correcthorsebatterystaple",
                "accept_terms": "on",
            }));
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::SEE_OTHER);
        let location = response.headers().get(LOCATION).unwrap();

        // The handler redirects with the ID as the second to last portion of the path
        let id = location
            .to_str()
            .unwrap()
            .rsplit('/')
            .nth(1)
            .unwrap()
            .parse()
            .unwrap();

        // There should be a new registration in the database
        let mut repo = state.repository().await.unwrap();
        let registration = repo.user_registration().lookup(id).await.unwrap().unwrap();
        assert_eq!(registration.username, "charlie".to_owned());
        assert!(registration.password.is_some());

        // Email authentication should be None when email is not required
        // (email input is completely ignored in this case)
        assert!(registration.email_authentication_id.is_none());
    }

    /// Test registration fails when email is required but not provided
    #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
    async fn test_register_fails_without_email_when_required(pool: PgPool) {
        setup();
        let state = TestState::from_pool_with_site_config(
            pool,
            SiteConfig {
                password_registration_email_required: true,
                ..test_site_config()
            },
        )
        .await
        .unwrap();
        let cookies = CookieHelper::new();

        // Render the registration page and get the CSRF token
        let request =
            Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty();
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");
        // Extract the CSRF token from the response body
        let csrf_token = response
            .body()
            .split("name=\"csrf\" value=\"")
            .nth(1)
            .unwrap()
            .split('\"')
            .next()
            .unwrap();

        // Submit the registration form without email
        let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query())
            .form(serde_json::json!({
                "csrf": csrf_token,
                "username": "david",
                "password": "correcthorsebatterystaple",
                "password_confirm": "correcthorsebatterystaple",
                "accept_terms": "on",
            }));
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        // Validation failure re-renders the form (200 OK) instead of redirecting
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");

        // Check that the response contains an error about the email field
        let body = response.body();
        assert!(body.contains("email") || body.contains("Email"));

        // Ensure no registration was created
        let mut repo = state.repository().await.unwrap();
        let user_exists = repo.user().exists("david").await.unwrap();
        assert!(!user_exists);
    }

    /// Test registration fails when email is required but empty
    #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
    async fn test_register_fails_with_empty_email_when_required(pool: PgPool) {
        setup();
        let state = TestState::from_pool_with_site_config(
            pool,
            SiteConfig {
                password_registration_email_required: true,
                ..test_site_config()
            },
        )
        .await
        .unwrap();
        let cookies = CookieHelper::new();

        // Render the registration page and get the CSRF token
        let request =
            Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty();
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");
        // Extract the CSRF token from the response body
        let csrf_token = response
            .body()
            .split("name=\"csrf\" value=\"")
            .nth(1)
            .unwrap()
            .split('\"')
            .next()
            .unwrap();

        // Submit the registration form with empty email
        let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query())
            .form(serde_json::json!({
                "csrf": csrf_token,
                "username": "eve",
                "email": "",
                "password": "correcthorsebatterystaple",
                "password_confirm": "correcthorsebatterystaple",
                "accept_terms": "on",
            }));
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        // Validation failure re-renders the form (200 OK) instead of redirecting
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");

        // Check that the response contains an error about the email field
        let body = response.body();
        assert!(body.contains("email") || body.contains("Email"));

        // Ensure no registration was created
        let mut repo = state.repository().await.unwrap();
        let user_exists = repo.user().exists("eve").await.unwrap();
        assert!(!user_exists);
    }

    /// Test registration fails with invalid email when email is required
    #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
    async fn test_register_fails_with_invalid_email_when_required(pool: PgPool) {
        setup();
        let state = TestState::from_pool_with_site_config(
            pool,
            SiteConfig {
                password_registration_email_required: true,
                ..test_site_config()
            },
        )
        .await
        .unwrap();
        let cookies = CookieHelper::new();

        // Render the registration page and get the CSRF token
        let request =
            Request::get(&*mas_router::PasswordRegister::default().path_and_query()).empty();
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");
        // Extract the CSRF token from the response body
        let csrf_token = response
            .body()
            .split("name=\"csrf\" value=\"")
            .nth(1)
            .unwrap()
            .split('\"')
            .next()
            .unwrap();

        // Submit the registration form with invalid email
        let request = Request::post(&*mas_router::PasswordRegister::default().path_and_query())
            .form(serde_json::json!({
                "csrf": csrf_token,
                "username": "grace",
                "email": "not-an-email",
                "password": "correcthorsebatterystaple",
                "password_confirm": "correcthorsebatterystaple",
                "accept_terms": "on",
            }));
        let request = cookies.with_cookies(request);
        let response = state.request(request).await;
        cookies.save_cookies(&response);
        // Validation failure re-renders the form (200 OK) instead of redirecting
        response.assert_status(StatusCode::OK);
        response.assert_header_value(CONTENT_TYPE, "text/html; charset=utf-8");

        // Check that the response contains an error about the email field
        let body = response.body();
        assert!(body.contains("email") || body.contains("Email"));

        // Ensure no registration was created
        let mut repo = state.repository().await.unwrap();
        let user_exists = repo.user().exists("grace").await.unwrap();
        assert!(!user_exists);
    }
}
b/matrix-authentication-service/crates/handlers/src/views/register/steps/display_name.rs new file mode 100644 index 00000000..0749c097 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/register/steps/display_name.rs @@ -0,0 +1,183 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context as _; +use axum::{ + Form, + extract::{Path, State}, + response::{Html, IntoResponse, Response}, +}; +use mas_axum_utils::{ + InternalError, + cookies::CookieJar, + csrf::{CsrfExt as _, ProtectedForm}, +}; +use mas_data_model::{BoxClock, BoxRng}; +use mas_router::{PostAuthAction, UrlBuilder}; +use mas_storage::BoxRepository; +use mas_templates::{ + FieldError, RegisterStepsDisplayNameContext, RegisterStepsDisplayNameFormField, + TemplateContext as _, Templates, ToFormState, +}; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::{PreferredLanguage, views::shared::OptionalPostAuthAction}; + +#[derive(Deserialize, Default)] +#[serde(rename_all = "snake_case")] +enum FormAction { + #[default] + Set, + Skip, +} + +#[derive(Deserialize, Serialize)] +pub(crate) struct DisplayNameForm { + #[serde(skip_serializing, default)] + action: FormAction, + #[serde(default)] + display_name: String, +} + +impl ToFormState for DisplayNameForm { + type Field = mas_templates::RegisterStepsDisplayNameFormField; +} + +#[tracing::instrument( + name = "handlers.views.register.steps.display_name.get", + fields(user_registration.id = %id), + skip_all, +)] +pub(crate) async fn get( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + mut repo: BoxRepository, + Path(id): Path, + cookie_jar: CookieJar, +) -> Result { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let registration = repo + 
.user_registration() + .lookup(id) + .await? + .context("Could not find user registration") + .map_err(InternalError::from_anyhow)?; + + // If the registration is completed, we can go to the registration destination + // XXX: this might not be the right thing to do? Maybe an error page would be + // better? + if registration.completed_at.is_some() { + let post_auth_action: Option = registration + .post_auth_action + .map(serde_json::from_value) + .transpose()?; + + return Ok(( + cookie_jar, + OptionalPostAuthAction::from(post_auth_action) + .go_next(&url_builder) + .into_response(), + ) + .into_response()); + } + + let ctx = RegisterStepsDisplayNameContext::new() + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_register_steps_display_name(&ctx)?; + + Ok((cookie_jar, Html(content)).into_response()) +} + +#[tracing::instrument( + name = "handlers.views.register.steps.display_name.post", + fields(user_registration.id = %id), + skip_all, +)] +pub(crate) async fn post( + mut rng: BoxRng, + clock: BoxClock, + PreferredLanguage(locale): PreferredLanguage, + State(templates): State, + State(url_builder): State, + mut repo: BoxRepository, + Path(id): Path, + cookie_jar: CookieJar, + Form(form): Form>, +) -> Result { + let registration = repo + .user_registration() + .lookup(id) + .await? + .context("Could not find user registration") + .map_err(InternalError::from_anyhow)?; + + // If the registration is completed, we can go to the registration destination + // XXX: this might not be the right thing to do? Maybe an error page would be + // better? 
+ if registration.completed_at.is_some() { + let post_auth_action: Option = registration + .post_auth_action + .map(serde_json::from_value) + .transpose()?; + + return Ok(( + cookie_jar, + OptionalPostAuthAction::from(post_auth_action) + .go_next(&url_builder) + .into_response(), + ) + .into_response()); + } + + let form = cookie_jar.verify_form(&clock, form)?; + + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + + let display_name = match form.action { + FormAction::Set => { + let display_name = form.display_name.trim(); + + if display_name.is_empty() || display_name.len() > 255 { + let ctx = RegisterStepsDisplayNameContext::new() + .with_form_state(form.to_form_state().with_error_on_field( + RegisterStepsDisplayNameFormField::DisplayName, + FieldError::Invalid, + )) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + return Ok(( + cookie_jar, + Html(templates.render_register_steps_display_name(&ctx)?), + ) + .into_response()); + } + + display_name.to_owned() + } + FormAction::Skip => { + // If the user chose to skip, we do the same as Synapse and use the localpart as + // default display name + registration.username.clone() + } + }; + + let registration = repo + .user_registration() + .set_display_name(registration, display_name) + .await?; + + repo.save().await?; + + let destination = mas_router::RegisterFinish::new(registration.id); + return Ok((cookie_jar, url_builder.redirect(&destination)).into_response()); +} diff --git a/matrix-authentication-service/crates/handlers/src/views/register/steps/finish.rs b/matrix-authentication-service/crates/handlers/src/views/register/steps/finish.rs new file mode 100644 index 00000000..af0b8ef9 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/register/steps/finish.rs @@ -0,0 +1,367 @@ +// Copyright 2025 New Vector Ltd. 
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.

//! Final registration step: perform last-minute validation, then actually
//! create the user, its browser session and associated records.

use std::sync::{Arc, LazyLock};

use anyhow::Context as _;
use axum::{
    extract::{Path, State},
    response::{Html, IntoResponse, Response},
};
use axum_extra::TypedHeader;
use chrono::Duration;
use mas_axum_utils::{InternalError, SessionInfoExt as _, cookies::CookieJar};
use mas_data_model::{BoxClock, BoxRng, SiteConfig};
use mas_matrix::HomeserverConnection;
use mas_router::{PostAuthAction, UrlBuilder};
use mas_storage::{
    BoxRepository,
    queue::{ProvisionUserJob, QueueJobRepositoryExt as _},
    user::UserEmailFilter,
};
use mas_templates::{RegisterStepsEmailInUseContext, TemplateContext as _, Templates};
use opentelemetry::metrics::Counter;
use ulid::Ulid;

use super::super::cookie::UserRegistrationSessions;
use crate::{
    BoundActivityTracker, METER, PreferredLanguage, views::shared::OptionalPostAuthAction,
};

// Counter incremented once per completed password registration.
static PASSWORD_REGISTER_COUNTER: LazyLock<Counter<u64>> = LazyLock::new(|| {
    METER
        .u64_counter("mas.user.password_registration")
        .with_description("Number of password registrations")
        .with_unit("{registration}")
        .build()
});

/// Complete a user registration: re-validate everything (username/email
/// availability, registration token, email verification, upstream OAuth
/// link, display name), then create the user and a logged-in browser session.
/// May redirect back to an earlier step if a prerequisite is missing.
#[tracing::instrument(
    name = "handlers.views.register.steps.finish.get",
    fields(user_registration.id = %id),
    skip_all,
)]
pub(crate) async fn get(
    mut rng: BoxRng,
    clock: BoxClock,
    mut repo: BoxRepository,
    activity_tracker: BoundActivityTracker,
    user_agent: Option<TypedHeader<headers::UserAgent>>,
    State(url_builder): State<UrlBuilder>,
    State(homeserver): State<Arc<dyn HomeserverConnection>>,
    State(templates): State<Templates>,
    State(site_config): State<SiteConfig>,
    PreferredLanguage(lang): PreferredLanguage,
    cookie_jar: CookieJar,
    Path(id): Path<Ulid>,
) -> Result<Response, InternalError> {
    let user_agent = user_agent.map(|ua| ua.as_str().to_owned());
    let registration = repo
        .user_registration()
        .lookup(id)
        .await?
        .context("User registration not found")
        .map_err(InternalError::from_anyhow)?;

    // If the registration is completed, we can go to the registration destination
    // XXX: this might not be the right thing to do? Maybe an error page would be
    // better?
    if registration.completed_at.is_some() {
        let post_auth_action: Option<PostAuthAction> = registration
            .post_auth_action
            .map(serde_json::from_value)
            .transpose()?;

        return Ok((
            cookie_jar,
            OptionalPostAuthAction::from(post_auth_action).go_next(&url_builder),
        )
            .into_response());
    }

    // Make sure the registration session hasn't expired
    // XXX: this duration is hard-coded, could be configurable
    if clock.now() - registration.created_at > Duration::hours(1) {
        return Err(InternalError::from_anyhow(anyhow::anyhow!(
            "Registration session has expired"
        )));
    }

    // Check that this registration belongs to this browser
    let registrations = UserRegistrationSessions::load(&cookie_jar);
    if !registrations.contains(&registration) {
        // XXX: we should have a better error screen here
        return Err(InternalError::from_anyhow(anyhow::anyhow!(
            "Could not find the registration in the browser cookies"
        )));
    }

    // Let's perform last minute checks on the registration, especially to avoid
    // race conditions where multiple users register with the same username or email
    // address

    if repo.user().exists(&registration.username).await? {
        // XXX: this could have a better error message, but as this is unlikely to
        // happen, we're fine with a vague message for now
        return Err(InternalError::from_anyhow(anyhow::anyhow!(
            "Username is already taken"
        )));
    }

    if !homeserver
        .is_localpart_available(&registration.username)
        .await
        .map_err(InternalError::from_anyhow)?
    {
        return Err(InternalError::from_anyhow(anyhow::anyhow!(
            "Username is not available"
        )));
    }

    // Check if the registration token is required and was provided
    let registration_token = if site_config.registration_token_required {
        if let Some(registration_token_id) = registration.user_registration_token_id {
            let registration_token = repo
                .user_registration_token()
                .lookup(registration_token_id)
                .await?
                .context("Could not load the registration token")
                .map_err(InternalError::from_anyhow)?;

            if !registration_token.is_valid(clock.now()) {
                // XXX: the registration token isn't valid anymore, we should
                // have a better error in this case?
                return Err(InternalError::from_anyhow(anyhow::anyhow!(
                    "Registration token used is no longer valid"
                )));
            }

            Some(registration_token)
        } else {
            // Else redirect to the registration token page
            return Ok((
                cookie_jar,
                url_builder.redirect(&mas_router::RegisterToken::new(registration.id)),
            )
                .into_response());
        }
    } else {
        None
    };

    // If there is an email authentication, we need to check that the email
    // address was verified. If there is no email authentication attached, we
    // need to make sure the server doesn't require it
    let email_authentication =
        if let Some(email_authentication_id) = registration.email_authentication_id {
            let email_authentication = repo
                .user_email()
                .lookup_authentication(email_authentication_id)
                .await?
                .context("Could not load the email authentication")
                .map_err(InternalError::from_anyhow)?;

            // Check that the email authentication has been completed
            if email_authentication.completed_at.is_none() {
                return Ok((
                    cookie_jar,
                    url_builder.redirect(&mas_router::RegisterVerifyEmail::new(id)),
                )
                    .into_response());
            }

            // Check that the email address isn't already used
            // It is important to do that here, as we're not checking during the
            // registration, because we don't want to disclose whether an email is
            // already being used or not before we verified it
            if repo
                .user_email()
                .count(UserEmailFilter::new().for_email(&email_authentication.email))
                .await?
                > 0
            {
                let action = registration
                    .post_auth_action
                    .map(serde_json::from_value)
                    .transpose()?;

                let ctx = RegisterStepsEmailInUseContext::new(email_authentication.email, action)
                    .with_language(lang);

                return Ok((
                    cookie_jar,
                    Html(templates.render_register_steps_email_in_use(&ctx)?),
                )
                    .into_response());
            }

            Some(email_authentication)
        } else {
            None
        };

    // If this registration was created from an upstream OAuth session, check
    // it is still valid and wasn't linked to a user in the meantime
    let upstream_oauth = if let Some(upstream_oauth_authorization_session_id) =
        registration.upstream_oauth_authorization_session_id
    {
        let upstream_oauth_authorization_session = repo
            .upstream_oauth_session()
            .lookup(upstream_oauth_authorization_session_id)
            .await?
            .context("Could not load the upstream OAuth authorization session")
            .map_err(InternalError::from_anyhow)?;

        let link_id = upstream_oauth_authorization_session
            .link_id()
            // This should not happen, the session is associated with the user
            // registration once the link was already created
            .context("Authorization session has no upstream link associated with it")
            .map_err(InternalError::from_anyhow)?;

        if upstream_oauth_authorization_session.is_consumed() {
            // This means an authorization session was used to create multiple
            // user registrations. This can happen if the user goes back in
            // their navigation history and basically registers twice. We also
            // used to consume the session earlier in the flow, so it's also
            // possible that it happens during the rollout of that version. This
            // is not going to happen often enough to have a dedicated page
            return Err(InternalError::from_anyhow(anyhow::anyhow!(
                "The upstream authorization session was already used. Try registering again"
            )));
        }

        let upstream_oauth_link = repo
            .upstream_oauth_link()
            .lookup(link_id)
            .await?
            .context("Could not load the upstream OAuth link")
            .map_err(InternalError::from_anyhow)?;

        if upstream_oauth_link.user_id.is_some() {
            // This means the link was already associated to a user. This could
            // in theory happen if the same user registers concurrently, but
            // this is not going to happen often enough to have a dedicated page
            return Err(InternalError::from_anyhow(anyhow::anyhow!(
                "The upstream identity was already linked to a user. Try logging in again"
            )));
        }

        Some((upstream_oauth_authorization_session, upstream_oauth_link))
    } else {
        None
    };

    // Check that the display name is set
    if registration.display_name.is_none() {
        return Ok((
            cookie_jar,
            url_builder.redirect(&mas_router::RegisterDisplayName::new(registration.id)),
        )
            .into_response());
    }

    // Everything is good, let's complete the registration
    let registration = repo
        .user_registration()
        .complete(&clock, registration)
        .await?;

    // If we used a registration token, we need to mark it as used
    if let Some(registration_token) = registration_token {
        repo.user_registration_token()
            .use_token(&clock, registration_token)
            .await?;
    }

    // Consume the registration session
    let cookie_jar = registrations
        .consume_session(&registration)?
        .save(cookie_jar, &clock);

    // Now we can start the user creation
    let user = repo
        .user()
        .add(&mut rng, &clock, registration.username)
        .await?;
    // Also create a browser session which will log the user in
    let user_session = repo
        .browser_session()
        .add(&mut rng, &clock, &user, user_agent)
        .await?;

    if let Some(email_authentication) = email_authentication {
        repo.user_email()
            .add(&mut rng, &clock, &user, email_authentication.email)
            .await?;
    }

    if let Some(password) = registration.password {
        let user_password = repo
            .user_password()
            .add(
                &mut rng,
                &clock,
                &user,
                password.version,
                password.hashed_password,
                None,
            )
            .await?;

        repo.browser_session()
            .authenticate_with_password(&mut rng, &clock, &user_session, &user_password)
            .await?;

        PASSWORD_REGISTER_COUNTER.add(1, &[]);
    }

    if let Some((upstream_session, upstream_link)) = upstream_oauth {
        let upstream_session = repo
            .upstream_oauth_session()
            .consume(&clock, upstream_session, &user_session)
            .await?;

        repo.upstream_oauth_link()
            .associate_to_user(&upstream_link, &user)
            .await?;

        repo.browser_session()
            .authenticate_with_upstream(&mut rng, &clock, &user_session, &upstream_session)
            .await?;
    }

    if let Some(terms_url) = registration.terms_url {
        repo.user_terms()
            .accept_terms(&mut rng, &clock, &user, terms_url)
            .await?;
    }

    // Queue a job to provision the user on the homeserver, carrying the
    // display name along if one was set
    let mut job = ProvisionUserJob::new(&user);
    if let Some(display_name) = registration.display_name {
        job = job.set_display_name(display_name);
    }
    repo.queue_job().schedule_job(&mut rng, &clock, job).await?;

    repo.save().await?;

    activity_tracker
        .record_browser_session(&clock, &user_session)
        .await;

    let post_auth_action: Option<PostAuthAction> = registration
        .post_auth_action
        .map(serde_json::from_value)
        .transpose()?;

    // Login the user with the session we just created
    let cookie_jar = cookie_jar.set_session(&user_session);

    return Ok((
        cookie_jar,
        OptionalPostAuthAction::from(post_auth_action).go_next(&url_builder),
    )
        .into_response());
}

// ---------------------------------------------------------------------------
// crates/handlers/src/views/register/steps/mod.rs
// ---------------------------------------------------------------------------

// Copyright 2025 New Vector Ltd.
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.

pub(crate) mod display_name;
pub(crate) mod finish;
pub(crate) mod registration_token;
pub(crate) mod verify_email;
// Copyright 2025 New Vector Ltd.
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.

//! Registration step asking the user for a registration token, when the
//! server is configured to require one.

use anyhow::Context as _;
use axum::{
    Form,
    extract::{Path, State},
    response::{Html, IntoResponse, Response},
};
use mas_axum_utils::{
    InternalError,
    cookies::CookieJar,
    csrf::{CsrfExt as _, ProtectedForm},
};
use mas_data_model::{BoxClock, BoxRng};
use mas_router::{PostAuthAction, UrlBuilder};
use mas_storage::BoxRepository;
use mas_templates::{
    FieldError, RegisterStepsRegistrationTokenContext, RegisterStepsRegistrationTokenFormField,
    TemplateContext as _, Templates, ToFormState,
};
use serde::{Deserialize, Serialize};
use ulid::Ulid;

use crate::{PreferredLanguage, views::shared::OptionalPostAuthAction};

/// The registration token form; `token` defaults to an empty string so a
/// missing field still deserializes and is rejected as `Required` below.
#[derive(Deserialize, Serialize)]
pub(crate) struct RegistrationTokenForm {
    #[serde(default)]
    token: String,
}

impl ToFormState for RegistrationTokenForm {
    type Field = mas_templates::RegisterStepsRegistrationTokenFormField;
}

/// Render the registration token form for an in-progress registration.
///
/// Skips ahead if the registration is already completed or already has a
/// token associated with it.
#[tracing::instrument(
    name = "handlers.views.register.steps.registration_token.get",
    fields(user_registration.id = %id),
    skip_all,
)]
pub(crate) async fn get(
    mut rng: BoxRng,
    clock: BoxClock,
    PreferredLanguage(locale): PreferredLanguage,
    State(templates): State<Templates>,
    State(url_builder): State<UrlBuilder>,
    mut repo: BoxRepository,
    Path(id): Path<Ulid>,
    cookie_jar: CookieJar,
) -> Result<Response, InternalError> {
    let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng);

    let registration = repo
        .user_registration()
        .lookup(id)
        .await?
        .context("Could not find user registration")
        .map_err(InternalError::from_anyhow)?;

    // If the registration is completed, we can go to the registration destination
    if registration.completed_at.is_some() {
        let post_auth_action: Option<PostAuthAction> = registration
            .post_auth_action
            .map(serde_json::from_value)
            .transpose()?;

        return Ok((
            cookie_jar,
            OptionalPostAuthAction::from(post_auth_action)
                .go_next(&url_builder)
                .into_response(),
        )
            .into_response());
    }

    // If the registration already has a token, skip this step
    if registration.user_registration_token_id.is_some() {
        let destination = mas_router::RegisterDisplayName::new(registration.id);
        return Ok((cookie_jar, url_builder.redirect(&destination)).into_response());
    }

    let ctx = RegisterStepsRegistrationTokenContext::new()
        .with_csrf(csrf_token.form_value())
        .with_language(locale);

    let content = templates.render_register_steps_registration_token(&ctx)?;

    Ok((cookie_jar, Html(content)).into_response())
}

/// Handle the registration token form submission: validate the token (not
/// empty, exists, still valid), associate it with the registration, and
/// continue to the finish step.
#[tracing::instrument(
    name = "handlers.views.register.steps.registration_token.post",
    fields(user_registration.id = %id),
    skip_all,
)]
pub(crate) async fn post(
    mut rng: BoxRng,
    clock: BoxClock,
    PreferredLanguage(locale): PreferredLanguage,
    State(templates): State<Templates>,
    State(url_builder): State<UrlBuilder>,
    mut repo: BoxRepository,
    Path(id): Path<Ulid>,
    cookie_jar: CookieJar,
    Form(form): Form<ProtectedForm<RegistrationTokenForm>>,
) -> Result<Response, InternalError> {
    let registration = repo
        .user_registration()
        .lookup(id)
        .await?
        .context("Could not find user registration")
        .map_err(InternalError::from_anyhow)?;

    // If the registration is completed, we can go to the registration destination
    if registration.completed_at.is_some() {
        let post_auth_action: Option<PostAuthAction> = registration
            .post_auth_action
            .map(serde_json::from_value)
            .transpose()?;

        return Ok((
            cookie_jar,
            OptionalPostAuthAction::from(post_auth_action)
                .go_next(&url_builder)
                .into_response(),
        )
            .into_response());
    }

    let form = cookie_jar.verify_form(&clock, form)?;

    let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng);

    // Validate the token
    let token = form.token.trim();
    if token.is_empty() {
        let ctx = RegisterStepsRegistrationTokenContext::new()
            .with_form_state(form.to_form_state().with_error_on_field(
                RegisterStepsRegistrationTokenFormField::Token,
                FieldError::Required,
            ))
            .with_csrf(csrf_token.form_value())
            .with_language(locale);

        return Ok((
            cookie_jar,
            Html(templates.render_register_steps_registration_token(&ctx)?),
        )
            .into_response());
    }

    // Look up the token
    let Some(registration_token) = repo.user_registration_token().find_by_token(token).await?
    else {
        let ctx = RegisterStepsRegistrationTokenContext::new()
            .with_form_state(form.to_form_state().with_error_on_field(
                RegisterStepsRegistrationTokenFormField::Token,
                FieldError::Invalid,
            ))
            .with_csrf(csrf_token.form_value())
            .with_language(locale);

        return Ok((
            cookie_jar,
            Html(templates.render_register_steps_registration_token(&ctx)?),
        )
            .into_response());
    };

    // Check if the token is still valid
    if !registration_token.is_valid(clock.now()) {
        tracing::warn!("Registration token isn't valid (expired or already used)");
        let ctx = RegisterStepsRegistrationTokenContext::new()
            .with_form_state(form.to_form_state().with_error_on_field(
                RegisterStepsRegistrationTokenFormField::Token,
                FieldError::Invalid,
            ))
            .with_csrf(csrf_token.form_value())
            .with_language(locale);

        return Ok((
            cookie_jar,
            Html(templates.render_register_steps_registration_token(&ctx)?),
        )
            .into_response());
    }

    // Associate the token with the registration
    let registration = repo
        .user_registration()
        .set_registration_token(registration, &registration_token)
        .await?;

    repo.save().await?;

    // Continue to the next step
    let destination = mas_router::RegisterFinish::new(registration.id);
    Ok((cookie_jar, url_builder.redirect(&destination)).into_response())
}
// Copyright 2025 New Vector Ltd.
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.

//! Registration step asking the user to enter the code sent to their email
//! address to verify it.

use anyhow::Context;
use axum::{
    extract::{Form, Path, State},
    response::{Html, IntoResponse, Response},
};
use mas_axum_utils::{
    InternalError,
    cookies::CookieJar,
    csrf::{CsrfExt, ProtectedForm},
};
use mas_data_model::{BoxClock, BoxRng};
use mas_router::{PostAuthAction, UrlBuilder};
use mas_storage::{BoxRepository, RepositoryAccess, user::UserEmailRepository};
use mas_templates::{
    FieldError, RegisterStepsVerifyEmailContext, RegisterStepsVerifyEmailFormField,
    TemplateContext, Templates, ToFormState,
};
use serde::{Deserialize, Serialize};
use ulid::Ulid;

use crate::{Limiter, PreferredLanguage, views::shared::OptionalPostAuthAction};

/// The form carrying the verification code the user received by email.
#[derive(Serialize, Deserialize, Debug)]
pub struct CodeForm {
    code: String,
}

impl ToFormState for CodeForm {
    type Field = mas_templates::RegisterStepsVerifyEmailFormField;
}

/// Render the email verification code form for an in-progress registration.
///
/// Errors if no email authentication was started, or if it is already
/// completed.
#[tracing::instrument(
    name = "handlers.views.register.steps.verify_email.get",
    fields(user_registration.id = %id),
    skip_all,
)]
pub(crate) async fn get(
    mut rng: BoxRng,
    clock: BoxClock,
    PreferredLanguage(locale): PreferredLanguage,
    State(templates): State<Templates>,
    State(url_builder): State<UrlBuilder>,
    mut repo: BoxRepository,
    Path(id): Path<Ulid>,
    cookie_jar: CookieJar,
) -> Result<Response, InternalError> {
    let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng);

    let registration = repo
        .user_registration()
        .lookup(id)
        .await?
        .context("Could not find user registration")
        .map_err(InternalError::from_anyhow)?;

    // If the registration is completed, we can go to the registration destination
    // XXX: this might not be the right thing to do? Maybe an error page would be
    // better?
    if registration.completed_at.is_some() {
        let post_auth_action: Option<PostAuthAction> = registration
            .post_auth_action
            .map(serde_json::from_value)
            .transpose()?;

        return Ok((
            cookie_jar,
            OptionalPostAuthAction::from(post_auth_action)
                .go_next(&url_builder)
                .into_response(),
        )
            .into_response());
    }

    let email_authentication_id = registration
        .email_authentication_id
        .context("No email authentication started for this registration")
        .map_err(InternalError::from_anyhow)?;
    let email_authentication = repo
        .user_email()
        .lookup_authentication(email_authentication_id)
        .await?
        .context("Could not find email authentication")
        .map_err(InternalError::from_anyhow)?;

    if email_authentication.completed_at.is_some() {
        // XXX: display a better error here
        return Err(InternalError::from_anyhow(anyhow::anyhow!(
            "Email authentication already completed"
        )));
    }

    let ctx = RegisterStepsVerifyEmailContext::new(email_authentication)
        .with_csrf(csrf_token.form_value())
        .with_language(locale);

    let content = templates.render_register_steps_verify_email(&ctx)?;

    Ok((cookie_jar, Html(content)).into_response())
}

// NOTE(review): the span name below ("handlers.views.account_email_verify.post")
// and the `user_email.id` field look copy-pasted from the account email verify
// handler — `id` here is a user registration ID. Confirm whether this should be
// "handlers.views.register.steps.verify_email.post" / `user_registration.id`.
#[tracing::instrument(
    name = "handlers.views.account_email_verify.post",
    fields(user_email.id = %id),
    skip_all,
)]
pub(crate) async fn post(
    clock: BoxClock,
    mut rng: BoxRng,
    PreferredLanguage(locale): PreferredLanguage,
    State(templates): State<Templates>,
    State(limiter): State<Limiter>,
    mut repo: BoxRepository,
    cookie_jar: CookieJar,
    State(url_builder): State<UrlBuilder>,
    Path(id): Path<Ulid>,
    Form(form): Form<ProtectedForm<CodeForm>>,
) -> Result<Response, InternalError> {
    let form = cookie_jar.verify_form(&clock, form)?;

    let registration = repo
        .user_registration()
        .lookup(id)
        .await?
        .context("Could not find user registration")
        .map_err(InternalError::from_anyhow)?;

    // If the registration is completed, we can go to the registration destination
    // XXX: this might not be the right thing to do? Maybe an error page would be
    // better?
+ if registration.completed_at.is_some() { + let post_auth_action: Option = registration + .post_auth_action + .map(serde_json::from_value) + .transpose()?; + + return Ok(( + cookie_jar, + OptionalPostAuthAction::from(post_auth_action).go_next(&url_builder), + ) + .into_response()); + } + + let email_authentication_id = registration + .email_authentication_id + .context("No email authentication started for this registration") + .map_err(InternalError::from_anyhow)?; + let email_authentication = repo + .user_email() + .lookup_authentication(email_authentication_id) + .await? + .context("Could not find email authentication") + .map_err(InternalError::from_anyhow)?; + + if email_authentication.completed_at.is_some() { + // XXX: display a better error here + return Err(InternalError::from_anyhow(anyhow::anyhow!( + "Email authentication already completed" + ))); + } + + if let Err(e) = limiter.check_email_authentication_attempt(&email_authentication) { + tracing::warn!(error = &e as &dyn std::error::Error); + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = RegisterStepsVerifyEmailContext::new(email_authentication) + .with_form_state( + form.to_form_state() + .with_error_on_form(mas_templates::FormError::RateLimitExceeded), + ) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_register_steps_verify_email(&ctx)?; + + return Ok((cookie_jar, Html(content)).into_response()); + } + + let Some(code) = repo + .user_email() + .find_authentication_code(&email_authentication, &form.code) + .await? 
+ else { + let (csrf_token, cookie_jar) = cookie_jar.csrf_token(&clock, &mut rng); + let ctx = + RegisterStepsVerifyEmailContext::new(email_authentication) + .with_form_state(form.to_form_state().with_error_on_field( + RegisterStepsVerifyEmailFormField::Code, + FieldError::Invalid, + )) + .with_csrf(csrf_token.form_value()) + .with_language(locale); + + let content = templates.render_register_steps_verify_email(&ctx)?; + + return Ok((cookie_jar, Html(content)).into_response()); + }; + + repo.user_email() + .complete_authentication_with_code(&clock, email_authentication, &code) + .await?; + + repo.save().await?; + + let destination = mas_router::RegisterFinish::new(registration.id); + return Ok((cookie_jar, url_builder.redirect(&destination)).into_response()); +} diff --git a/matrix-authentication-service/crates/handlers/src/views/shared.rs b/matrix-authentication-service/crates/handlers/src/views/shared.rs new file mode 100644 index 00000000..85edf299 --- /dev/null +++ b/matrix-authentication-service/crates/handlers/src/views/shared.rs @@ -0,0 +1,109 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 

use anyhow::Context;
use mas_router::{PostAuthAction, Route, UrlBuilder};
use mas_storage::{
    RepositoryAccess,
    compat::CompatSsoLoginRepository,
    oauth2::OAuth2AuthorizationGrantRepository,
    upstream_oauth2::{UpstreamOAuthLinkRepository, UpstreamOAuthProviderRepository},
};
use mas_templates::{PostAuthContext, PostAuthContextInner};
use serde::{Deserialize, Serialize};
use tracing::warn;

/// An optional "post-auth action" carried through query parameters: where the
/// user should be sent once the current flow (login, registration, …)
/// completes.
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub(crate) struct OptionalPostAuthAction {
    #[serde(flatten)]
    pub post_auth_action: Option<PostAuthAction>,
}

impl From<Option<PostAuthAction>> for OptionalPostAuthAction {
    fn from(post_auth_action: Option<PostAuthAction>) -> Self {
        Self { post_auth_action }
    }
}

impl OptionalPostAuthAction {
    /// Redirect to the post-auth action if there is one, else to `default`.
    pub fn go_next_or_default<T: Route>(
        &self,
        url_builder: &UrlBuilder,
        default: &T,
    ) -> axum::response::Redirect {
        self.post_auth_action.as_ref().map_or_else(
            || url_builder.redirect(default),
            |action| action.go_next(url_builder),
        )
    }

    /// Redirect to the post-auth action if there is one, else to the index.
    pub fn go_next(&self, url_builder: &UrlBuilder) -> axum::response::Redirect {
        self.go_next_or_default(url_builder, &mas_router::Index)
    }

    /// Load the database objects referenced by the post-auth action so it can
    /// be rendered in templates.
    ///
    /// Returns `Ok(None)` when there is no action, or when the referenced
    /// object no longer exists — in that case a warning is logged instead of
    /// failing the request.
    pub async fn load_context<'a>(
        &'a self,
        repo: &'a mut impl RepositoryAccess,
    ) -> anyhow::Result<Option<PostAuthContext>> {
        let Some(action) = self.post_auth_action.clone() else {
            return Ok(None);
        };
        let ctx = match action {
            PostAuthAction::ContinueAuthorizationGrant { id } => {
                let Some(grant) = repo.oauth2_authorization_grant().lookup(id).await? else {
                    warn!(%id, "Failed to load authorization grant, it was likely deleted or is an invalid ID");
                    return Ok(None);
                };
                let grant = Box::new(grant);
                PostAuthContextInner::ContinueAuthorizationGrant { grant }
            }

            PostAuthAction::ContinueDeviceCodeGrant { id } => {
                let Some(grant) = repo.oauth2_device_code_grant().lookup(id).await? else {
                    warn!(%id, "Failed to load device code grant, it was likely deleted or is an invalid ID");
                    return Ok(None);
                };
                let grant = Box::new(grant);
                PostAuthContextInner::ContinueDeviceCodeGrant { grant }
            }

            PostAuthAction::ContinueCompatSsoLogin { id } => {
                let Some(login) = repo.compat_sso_login().lookup(id).await? else {
                    warn!(%id, "Failed to load compat SSO login, it was likely deleted or is an invalid ID");
                    return Ok(None);
                };
                let login = Box::new(login);
                PostAuthContextInner::ContinueCompatSsoLogin { login }
            }

            PostAuthAction::ChangePassword => PostAuthContextInner::ChangePassword,

            PostAuthAction::LinkUpstream { id } => {
                let Some(link) = repo.upstream_oauth_link().lookup(id).await? else {
                    warn!(%id, "Failed to load upstream OAuth 2.0 link, it was likely deleted or is an invalid ID");
                    return Ok(None);
                };

                // Unlike the lookups above, a link whose provider is missing
                // is an internal inconsistency, so this one is a hard error
                let provider = repo
                    .upstream_oauth_provider()
                    .lookup(link.provider_id)
                    .await?
                    .context("Failed to load upstream OAuth 2.0 provider")?;

                let provider = Box::new(provider);
                let link = Box::new(link);
                PostAuthContextInner::LinkUpstream { provider, link }
            }

            PostAuthAction::ManageAccount { .. } => PostAuthContextInner::ManageAccount,
        };

        Ok(Some(PostAuthContext {
            params: action.clone(),
            ctx,
        }))
    }
}
diff --git a/matrix-authentication-service/crates/http/Cargo.toml b/matrix-authentication-service/crates/http/Cargo.toml
new file mode 100644
index 00000000..66eaac60
--- /dev/null
+++ b/matrix-authentication-service/crates/http/Cargo.toml
@@ -0,0 +1,35 @@
+# Copyright 2025 New Vector Ltd.
+#
+# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+# Please see LICENSE files in the repository root for full details.
+ +[package] +name = "mas-http" +description = "HTTP utilities for the Matrix Authentication Service" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +futures-util.workspace = true +headers.workspace = true +http.workspace = true +hyper-util.workspace = true +opentelemetry-http.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +reqwest.workspace = true +rustls.workspace = true +rustls-platform-verifier.workspace = true +tokio.workspace = true +tower.workspace = true +tower-http.workspace = true +tracing.workspace = true +tracing-opentelemetry.workspace = true diff --git a/matrix-authentication-service/crates/http/src/ext.rs b/matrix-authentication-service/crates/http/src/ext.rs new file mode 100644 index 00000000..00c8a301 --- /dev/null +++ b/matrix-authentication-service/crates/http/src/ext.rs @@ -0,0 +1,51 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::OnceLock; + +use http::header::HeaderName; +use tower_http::cors::CorsLayer; + +static PROPAGATOR_HEADERS: OnceLock> = OnceLock::new(); + +/// Notify the CORS layer what opentelemetry propagators are being used. This +/// helps whitelisting headers in CORS requests. 
+/// +/// # Panics +/// +/// When called twice +pub fn set_propagator(propagator: &dyn opentelemetry::propagation::TextMapPropagator) { + let headers = propagator + .fields() + .map(|h| HeaderName::try_from(h).unwrap()) + .collect(); + + tracing::debug!( + ?headers, + "Headers allowed in CORS requests for trace propagators set" + ); + PROPAGATOR_HEADERS + .set(headers) + .expect(concat!(module_path!(), "::set_propagator was called twice")); +} + +pub trait CorsLayerExt { + #[must_use] + fn allow_otel_headers(self, headers: H) -> Self + where + H: IntoIterator; +} + +impl CorsLayerExt for CorsLayer { + fn allow_otel_headers(self, headers: H) -> Self + where + H: IntoIterator, + { + let base = PROPAGATOR_HEADERS.get().cloned().unwrap_or_default(); + let headers: Vec<_> = headers.into_iter().chain(base).collect(); + self.allow_headers(headers) + } +} diff --git a/matrix-authentication-service/crates/http/src/lib.rs b/matrix-authentication-service/crates/http/src/lib.rs new file mode 100644 index 00000000..02d1864d --- /dev/null +++ b/matrix-authentication-service/crates/http/src/lib.rs @@ -0,0 +1,29 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Utilities to do HTTP requests + +#![deny(rustdoc::missing_crate_level_docs)] +#![allow(clippy::module_name_repetitions)] + +use std::sync::LazyLock; + +mod ext; +mod reqwest; + +pub use self::{ + ext::{CorsLayerExt, set_propagator}, + reqwest::{RequestBuilderExt, client as reqwest_client}, +}; + +static METER: LazyLock = LazyLock::new(|| { + let scope = opentelemetry::InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(opentelemetry_semantic_conventions::SCHEMA_URL) + .build(); + + opentelemetry::global::meter_with_scope(scope) +}); diff --git a/matrix-authentication-service/crates/http/src/reqwest.rs b/matrix-authentication-service/crates/http/src/reqwest.rs new file mode 100644 index 00000000..a399a742 --- /dev/null +++ b/matrix-authentication-service/crates/http/src/reqwest.rs @@ -0,0 +1,239 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{ + str::FromStr, + sync::{Arc, LazyLock}, + time::Duration, +}; + +use futures_util::FutureExt as _; +use headers::{ContentLength, HeaderMapExt as _, UserAgent}; +use hyper_util::client::legacy::connect::{ + HttpInfo, + dns::{GaiResolver, Name}, +}; +use opentelemetry::{ + KeyValue, + metrics::{Histogram, UpDownCounter}, +}; +use opentelemetry_http::HeaderInjector; +use opentelemetry_semantic_conventions::{ + attribute::{HTTP_REQUEST_BODY_SIZE, HTTP_RESPONSE_BODY_SIZE}, + metric::{HTTP_CLIENT_ACTIVE_REQUESTS, HTTP_CLIENT_REQUEST_DURATION}, + trace::{ + ERROR_TYPE, HTTP_REQUEST_METHOD, HTTP_RESPONSE_STATUS_CODE, NETWORK_LOCAL_ADDRESS, + NETWORK_LOCAL_PORT, NETWORK_PEER_ADDRESS, NETWORK_PEER_PORT, NETWORK_TRANSPORT, + NETWORK_TYPE, SERVER_ADDRESS, SERVER_PORT, URL_FULL, URL_SCHEME, USER_AGENT_ORIGINAL, + }, +}; +use rustls_platform_verifier::ConfigVerifierExt; +use tokio::time::Instant; +use tower::{BoxError, Service as _}; +use tracing::Instrument; +use tracing_opentelemetry::OpenTelemetrySpanExt; + +use crate::METER; + +static USER_AGENT: &str = concat!("matrix-authentication-service/", env!("CARGO_PKG_VERSION")); + +static HTTP_REQUESTS_DURATION_HISTOGRAM: LazyLock> = LazyLock::new(|| { + METER + .u64_histogram(HTTP_CLIENT_REQUEST_DURATION) + .with_unit("ms") + .with_description("Duration of HTTP client requests") + .build() +}); + +static HTTP_REQUESTS_IN_FLIGHT: LazyLock> = LazyLock::new(|| { + METER + .i64_up_down_counter(HTTP_CLIENT_ACTIVE_REQUESTS) + .with_unit("{requests}") + .with_description("Number of HTTP client requests in flight") + .build() +}); + +struct TracingResolver { + inner: GaiResolver, +} + +impl TracingResolver { + fn new() -> Self { + let inner = GaiResolver::new(); + Self { inner } + } +} + +impl reqwest::dns::Resolve for TracingResolver { + fn resolve(&self, name: reqwest::dns::Name) -> reqwest::dns::Resolving { + let span = tracing::info_span!("dns.resolve", name = name.as_str()); + let inner = &mut self.inner.clone(); + 
        // Run the blocking getaddrinfo-based resolution inside the DNS span
        Box::pin(
            inner
                .call(Name::from_str(name.as_str()).unwrap())
                .map(|result| {
                    result
                        .map(|addrs| -> reqwest::dns::Addrs { Box::new(addrs) })
                        .map_err(|err| -> BoxError { Box::new(err) })
                })
                .instrument(span),
        )
    }
}

/// Create a new [`reqwest::Client`] with sane parameters
///
/// # Panics
///
/// Panics if the client fails to build, which should never happen
#[must_use]
pub fn client() -> reqwest::Client {
    // TODO: can/should we limit in-flight requests?

    // The explicit typing here is because `use_preconfigured_tls` accepts
    // `Any`, but wants a `ClientConfig` under the hood. This helps us detect
    // breaking changes in the rustls-platform-verifier API.
    let tls_config: rustls::ClientConfig =
        rustls::ClientConfig::with_platform_verifier().expect("failed to create TLS config");

    reqwest::Client::builder()
        .dns_resolver(Arc::new(TracingResolver::new()))
        .use_preconfigured_tls(tls_config)
        .user_agent(USER_AGENT)
        .timeout(Duration::from_secs(60))
        .connect_timeout(Duration::from_secs(30))
        .build()
        .expect("failed to create HTTP client")
}

/// Execute the request held by the builder, wrapped in an OTel-style HTTP
/// client span, with the trace context injected in the outgoing headers and
/// duration/in-flight metrics recorded.
async fn send_traced(
    request: reqwest::RequestBuilder,
) -> Result<reqwest::Response, reqwest::Error> {
    let start = Instant::now();
    let (client, request) = request.build_split();
    let mut request = request?;

    // Gather span/metric attributes before the request is consumed
    let headers = request.headers();
    let server_address = request.url().host_str().map(ToOwned::to_owned);
    let server_port = request.url().port_or_known_default();
    let scheme = request.url().scheme().to_owned();
    let user_agent = headers
        .typed_get::<UserAgent>()
        .map(tracing::field::display);
    let content_length = headers.typed_get().map(|ContentLength(len)| len);
    let method = request.method().to_string();

    // Create a new span for the request
    let span = tracing::info_span!(
        "http.client.request",
        "otel.kind" = "client",
        "otel.status_code" = tracing::field::Empty,
        { HTTP_REQUEST_METHOD } = method,
        { URL_FULL } = %request.url(),
        { HTTP_RESPONSE_STATUS_CODE } = tracing::field::Empty,
        { SERVER_ADDRESS } = server_address,
        { SERVER_PORT } = server_port,
        { HTTP_REQUEST_BODY_SIZE } = content_length,
        { HTTP_RESPONSE_BODY_SIZE } = tracing::field::Empty,
        { NETWORK_TRANSPORT } = "tcp",
        { NETWORK_TYPE } = tracing::field::Empty,
        { NETWORK_LOCAL_ADDRESS } = tracing::field::Empty,
        { NETWORK_LOCAL_PORT } = tracing::field::Empty,
        { NETWORK_PEER_ADDRESS } = tracing::field::Empty,
        { NETWORK_PEER_PORT } = tracing::field::Empty,
        { USER_AGENT_ORIGINAL } = user_agent,
        "rust.error" = tracing::field::Empty,
    );

    // Inject the span context into the request headers
    let context = span.context();
    opentelemetry::global::get_text_map_propagator(|propagator| {
        let mut injector = HeaderInjector(request.headers_mut());
        propagator.inject_context(&context, &mut injector);
    });

    let mut metrics_labels = vec![
        KeyValue::new(HTTP_REQUEST_METHOD, method.clone()),
        KeyValue::new(URL_SCHEME, scheme),
    ];

    if let Some(server_address) = server_address {
        metrics_labels.push(KeyValue::new(SERVER_ADDRESS, server_address));
    }

    if let Some(server_port) = server_port {
        metrics_labels.push(KeyValue::new(SERVER_PORT, i64::from(server_port)));
    }

    HTTP_REQUESTS_IN_FLIGHT.add(1, &metrics_labels);
    async move {
        let span = tracing::Span::current();
        let result = client.execute(request).await;

        // XXX: We *could* lose this if the future is dropped before this, but let's
        // not worry about it for now. Ideally we would use a `Drop` guard to decrement
        // the counter
        HTTP_REQUESTS_IN_FLIGHT.add(-1, &metrics_labels);

        let duration = start.elapsed().as_millis().try_into().unwrap_or(u64::MAX);
        let result = match result {
            Ok(response) => {
                span.record("otel.status_code", "OK");
                span.record(HTTP_RESPONSE_STATUS_CODE, response.status().as_u16());

                if let Some(ContentLength(content_length)) = response.headers().typed_get() {
                    span.record(HTTP_RESPONSE_BODY_SIZE, content_length);
                }

                // hyper-util exposes low-level connection info (local/peer
                // socket addresses) through response extensions
                if let Some(http_info) = response.extensions().get::<HttpInfo>() {
                    let local = http_info.local_addr();
                    let peer = http_info.remote_addr();
                    let family = if local.is_ipv4() { "ipv4" } else { "ipv6" };
                    span.record(NETWORK_TYPE, family);
                    span.record(NETWORK_LOCAL_ADDRESS, local.ip().to_string());
                    span.record(NETWORK_LOCAL_PORT, local.port());
                    span.record(NETWORK_PEER_ADDRESS, peer.ip().to_string());
                    span.record(NETWORK_PEER_PORT, peer.port());
                } else {
                    tracing::warn!("No HttpInfo injected in response extensions");
                }

                metrics_labels.push(KeyValue::new(
                    HTTP_RESPONSE_STATUS_CODE,
                    i64::from(response.status().as_u16()),
                ));

                Ok(response)
            }
            Err(err) => {
                span.record("otel.status_code", "ERROR");
                span.record("rust.error", &err as &dyn std::error::Error);

                metrics_labels.push(KeyValue::new(ERROR_TYPE, "NO_RESPONSE"));

                Err(err)
            }
        };

        HTTP_REQUESTS_DURATION_HISTOGRAM.record(duration, &metrics_labels);

        result
    }
    .instrument(span)
    .await
}

/// An extension trait implemented for [`reqwest::RequestBuilder`] to send a
/// request with a tracing span, and span context propagated.
pub trait RequestBuilderExt {
    /// Send the request with a tracing span, and span context propagated.
+ fn send_traced(self) -> impl Future> + Send; +} + +impl RequestBuilderExt for reqwest::RequestBuilder { + fn send_traced(self) -> impl Future> + Send { + send_traced(self) + } +} diff --git a/matrix-authentication-service/crates/i18n-scan/Cargo.toml b/matrix-authentication-service/crates/i18n-scan/Cargo.toml new file mode 100644 index 00000000..cfb1bd66 --- /dev/null +++ b/matrix-authentication-service/crates/i18n-scan/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-i18n-scan" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +camino.workspace = true +clap.workspace = true +minijinja.workspace = true +serde_json.workspace = true +tracing-subscriber.workspace = true +tracing.workspace = true +walkdir.workspace = true + +mas-i18n.workspace = true diff --git a/matrix-authentication-service/crates/i18n-scan/src/key.rs b/matrix-authentication-service/crates/i18n-scan/src/key.rs new file mode 100644 index 00000000..2a3b276d --- /dev/null +++ b/matrix-authentication-service/crates/i18n-scan/src/key.rs @@ -0,0 +1,118 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use mas_i18n::{Message, translations::TranslationTree}; +use minijinja::machinery::Span; + +pub struct Context { + keys: Vec, + func: String, + current_file: Option, +} + +impl Context { + pub fn new(func: String) -> Self { + Self { + keys: Vec::new(), + func, + current_file: None, + } + } + + pub fn set_current_file(&mut self, file: &str) { + self.current_file = Some(file.to_owned()); + } + + pub fn record(&mut self, key: Key) { + self.keys.push(key); + } + + pub fn func(&self) -> &str { + &self.func + } + + pub fn add_missing(&self, translation_tree: &mut TranslationTree) -> usize { + let mut count = 0; + for translatable in &self.keys { + let message = Message::from_literal(String::new()); + + let location = translatable.location.as_ref().map(|location| { + if location.span.start_line == location.span.end_line { + format!( + "{}:{}:{}-{}", + location.file, + location.span.start_line, + location.span.start_col, + location.span.end_col + ) + } else { + format!( + "{}:{}:{}-{}:{}", + location.file, + location.span.start_line, + location.span.start_col, + location.span.end_line, + location.span.end_col + ) + } + }); + + let key = translatable + .name + .split('.') + .chain(if translatable.kind == Kind::Plural { + Some("other") + } else { + None + }); + + if translation_tree.set_if_not_defined(key, message, location) { + count += 1; + } + } + count + } + + pub fn set_key_location(&self, mut key: Key, span: Span) -> Key { + if let Some(file) = &self.current_file { + key.location = Some(Location { + file: file.clone(), + span, + }); + } + + key + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Kind { + Message, + Plural, +} + +#[derive(Debug, Clone)] +pub struct Location { + file: String, + span: Span, +} + +#[derive(Debug, Clone)] +pub struct Key { + kind: Kind, + name: String, + location: Option, +} + +impl Key { + pub fn new(kind: Kind, name: String) -> Self { + Self { + kind, + name, + location: None, + } + } +} diff --git 
a/matrix-authentication-service/crates/i18n-scan/src/main.rs b/matrix-authentication-service/crates/i18n-scan/src/main.rs new file mode 100644 index 00000000..7f0824c3 --- /dev/null +++ b/matrix-authentication-service/crates/i18n-scan/src/main.rs @@ -0,0 +1,123 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// Without the custom_syntax feature, the `SyntaxConfig` is a unit struct +// which is annoying with this clippy lint +#![allow(clippy::default_constructed_unit_structs)] + +use std::{fs::File, io::BufReader}; + +use ::minijinja::{machinery::WhitespaceConfig, syntax::SyntaxConfig}; +use camino::Utf8PathBuf; +use clap::Parser; +use key::Context; +use mas_i18n::translations::TranslationTree; + +mod key; +mod minijinja; + +/// Scan a directory of templates for usage of the translation function and +/// output a translation tree. 
// Command-line options for the scanner.
#[derive(Parser)]
struct Options {
    /// The directory containing the templates
    templates: Utf8PathBuf,

    /// Path of the existing translation file
    existing: Option<Utf8PathBuf>,

    /// The extensions of the templates
    #[clap(long, default_value = "html,txt,subject")]
    extensions: String,

    /// The name of the translation function
    #[clap(long, default_value = "_")]
    function: String,

    /// Whether the existing translation file should be updated with missing
    /// keys in-place
    #[clap(long)]
    update: bool,
}

fn main() {
    tracing_subscriber::fmt::init();

    let options = Options::parse();

    // Open the existing translation file if one was provided
    let mut tree = if let Some(path) = &options.existing {
        let file = File::open(path).expect("Failed to open existing translation file");
        let mut reader = BufReader::new(file);
        serde_json::from_reader(&mut reader).expect("Failed to parse existing translation file")
    } else {
        TranslationTree::default()
    };

    let mut context = Context::new(options.function);

    // Walk the template directory, scanning every file whose extension matches
    for entry in walkdir::WalkDir::new(&options.templates) {
        let entry = entry.unwrap();
        if !entry.file_type().is_file() {
            continue;
        }

        let path: Utf8PathBuf = entry.into_path().try_into().expect("Non-UTF8 path");
        let relative = path.strip_prefix(&options.templates).expect("Invalid path");

        let Some(extension) = path.extension() else {
            continue;
        };

        if options.extensions.split(',').any(|e| e == extension) {
            tracing::debug!("Parsing {relative}");
            let template = std::fs::read_to_string(&path).expect("Failed to read template");
            match minijinja::parse(
                &template,
                relative.as_str(),
                SyntaxConfig::default(),
                WhitespaceConfig::default(),
            ) {
                Ok(ast) => {
                    context.set_current_file(relative.as_str());
                    minijinja::find_in_stmt(&mut context, &ast).unwrap();
                }
                Err(err) => {
                    // A template that fails to parse is reported but does not
                    // abort the scan
                    tracing::error!("Failed to parse {relative}: {}", err);
                }
            }
        }
    }

    let count = context.add_missing(&mut tree);

    match count {
        0 => tracing::debug!("No missing keys"),
        1 => tracing::info!("Added 1 missing key"),
        count => tracing::info!("Added {} missing keys", count),
    }

    if options.update {
        // Overwrite the existing translation file in place
        let mut file = File::options()
            .write(true)
            .read(false)
            .truncate(true)
            .open(
                options
                    .existing
                    .expect("--update requires an existing translation file"),
            )
            .expect("Failed to open existing translation file");

        serde_json::to_writer_pretty(&mut file, &tree).expect("Failed to write translation tree");
    } else {
        serde_json::to_writer_pretty(std::io::stdout(), &tree)
            .expect("Failed to write translation tree");
    }

    // `serde_json::to_writer_pretty` does not emit a trailing newline, so
    // terminate the stdout output with one
    println!();
}
diff --git a/matrix-authentication-service/crates/i18n-scan/src/minijinja.rs b/matrix-authentication-service/crates/i18n-scan/src/minijinja.rs
new file mode 100644
index 00000000..63aa63ae
--- /dev/null
+++ b/matrix-authentication-service/crates/i18n-scan/src/minijinja.rs
@@ -0,0 +1,398 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+ +pub use minijinja::machinery::parse; +use minijinja::{ + ErrorKind, + machinery::ast::{Call, CallArg, Const, Expr, Macro, Spanned, Stmt}, +}; + +use crate::key::{Context, Key}; + +pub fn find_in_stmt<'a>(context: &mut Context, stmt: &'a Stmt<'a>) -> Result<(), minijinja::Error> { + match stmt { + Stmt::Template(template) => find_in_stmts(context, &template.children)?, + Stmt::EmitExpr(emit_expr) => find_in_expr(context, &emit_expr.expr)?, + Stmt::EmitRaw(_raw) => {} + Stmt::ForLoop(for_loop) => { + find_in_expr(context, &for_loop.iter)?; + find_in_optional_expr(context, for_loop.filter_expr.as_ref())?; + find_in_expr(context, &for_loop.target)?; + find_in_stmts(context, &for_loop.body)?; + find_in_stmts(context, &for_loop.else_body)?; + } + Stmt::IfCond(if_cond) => { + find_in_expr(context, &if_cond.expr)?; + find_in_stmts(context, &if_cond.true_body)?; + find_in_stmts(context, &if_cond.false_body)?; + } + Stmt::WithBlock(with_block) => { + find_in_stmts(context, &with_block.body)?; + for (left, right) in &with_block.assignments { + find_in_expr(context, left)?; + find_in_expr(context, right)?; + } + } + Stmt::Set(set) => { + find_in_expr(context, &set.target)?; + find_in_expr(context, &set.expr)?; + } + Stmt::SetBlock(set_block) => { + find_in_expr(context, &set_block.target)?; + find_in_stmts(context, &set_block.body)?; + if let Some(expr) = &set_block.filter { + find_in_expr(context, expr)?; + } + } + Stmt::AutoEscape(auto_escape) => { + find_in_expr(context, &auto_escape.enabled)?; + find_in_stmts(context, &auto_escape.body)?; + } + Stmt::FilterBlock(filter_block) => { + find_in_expr(context, &filter_block.filter)?; + find_in_stmts(context, &filter_block.body)?; + } + Stmt::Block(block) => { + find_in_stmts(context, &block.body)?; + } + Stmt::Import(import) => { + find_in_expr(context, &import.name)?; + find_in_expr(context, &import.expr)?; + } + Stmt::FromImport(from_import) => { + find_in_expr(context, &from_import.expr)?; + for (name, alias) in 
&from_import.names { + find_in_expr(context, name)?; + find_in_optional_expr(context, alias.as_ref())?; + } + } + Stmt::Extends(extends) => { + find_in_expr(context, &extends.name)?; + } + Stmt::Include(include) => { + find_in_expr(context, &include.name)?; + } + Stmt::Macro(macro_) => { + find_in_macro(context, macro_)?; + } + Stmt::CallBlock(call_block) => { + find_in_call(context, &call_block.call)?; + find_in_macro(context, &call_block.macro_decl)?; + } + Stmt::Do(do_) => { + find_in_call(context, &do_.call)?; + } + } + + Ok(()) +} + +fn as_const<'a>(call_arg: &'a CallArg<'a>) -> Option<&'a Const> { + match call_arg { + CallArg::Pos(Expr::Const(const_)) => Some(const_), + _ => None, + } +} + +fn find_in_macro<'a>(context: &mut Context, macro_: &'a Macro<'a>) -> Result<(), minijinja::Error> { + find_in_stmts(context, ¯o_.body)?; + find_in_exprs(context, ¯o_.args)?; + find_in_exprs(context, ¯o_.defaults)?; + + Ok(()) +} + +fn find_in_call<'a>( + context: &mut Context, + call: &'a Spanned>, +) -> Result<(), minijinja::Error> { + let span = call.span(); + if let Expr::Var(var_) = &call.expr + && var_.id == context.func() + { + let key = call + .args + .first() + .and_then(as_const) + .and_then(|const_| const_.value.as_str()) + .ok_or(minijinja::Error::new( + ErrorKind::UndefinedError, + "t() first argument must be a string literal", + ))?; + + let has_count = call + .args + .iter() + .any(|arg| matches!(arg, CallArg::Kwarg("count", _))); + + let key = Key::new( + if has_count { + crate::key::Kind::Plural + } else { + crate::key::Kind::Message + }, + key.to_owned(), + ); + + let key = context.set_key_location(key, span); + + context.record(key); + } + + find_in_expr(context, &call.expr)?; + find_in_call_args(context, &call.args)?; + + Ok(()) +} + +fn find_in_call_args<'a>( + context: &mut Context, + args: &'a [CallArg<'a>], +) -> Result<(), minijinja::Error> { + for arg in args { + find_in_call_arg(context, arg)?; + } + + Ok(()) +} + +fn find_in_call_arg<'a>( + 
context: &mut Context, + arg: &'a CallArg<'a>, +) -> Result<(), minijinja::Error> { + match arg { + CallArg::Pos(expr) + | CallArg::Kwarg(_, expr) + | CallArg::PosSplat(expr) + | CallArg::KwargSplat(expr) => find_in_expr(context, expr), + } +} + +fn find_in_stmts<'a>(context: &mut Context, stmts: &'a [Stmt<'a>]) -> Result<(), minijinja::Error> { + for stmt in stmts { + find_in_stmt(context, stmt)?; + } + + Ok(()) +} + +fn find_in_expr<'a>(context: &mut Context, expr: &'a Expr<'a>) -> Result<(), minijinja::Error> { + match expr { + Expr::Var(_var) => {} + Expr::Const(_const) => {} + Expr::Slice(slice) => { + find_in_expr(context, &slice.expr)?; + find_in_optional_expr(context, slice.start.as_ref())?; + find_in_optional_expr(context, slice.stop.as_ref())?; + find_in_optional_expr(context, slice.step.as_ref())?; + } + Expr::UnaryOp(unary_op) => { + find_in_expr(context, &unary_op.expr)?; + } + Expr::BinOp(bin_op) => { + find_in_expr(context, &bin_op.left)?; + find_in_expr(context, &bin_op.right)?; + } + Expr::IfExpr(if_expr) => { + find_in_expr(context, &if_expr.test_expr)?; + find_in_expr(context, &if_expr.true_expr)?; + find_in_optional_expr(context, if_expr.false_expr.as_ref())?; + } + Expr::Filter(filter) => { + find_in_optional_expr(context, filter.expr.as_ref())?; + find_in_call_args(context, &filter.args)?; + } + Expr::Test(test) => { + find_in_expr(context, &test.expr)?; + find_in_call_args(context, &test.args)?; + } + Expr::GetAttr(get_attr) => { + find_in_expr(context, &get_attr.expr)?; + } + Expr::GetItem(get_item) => { + find_in_expr(context, &get_item.expr)?; + find_in_expr(context, &get_item.subscript_expr)?; + } + Expr::Call(call) => { + find_in_call(context, call)?; + } + Expr::List(list) => { + find_in_exprs(context, &list.items)?; + } + Expr::Map(map) => { + find_in_exprs(context, &map.keys)?; + find_in_exprs(context, &map.values)?; + } + } + + Ok(()) +} + +fn find_in_exprs<'a>(context: &mut Context, exprs: &'a [Expr<'a>]) -> Result<(), 
minijinja::Error> { + for expr in exprs { + find_in_expr(context, expr)?; + } + + Ok(()) +} + +fn find_in_optional_expr<'a>( + context: &mut Context, + expr: Option<&'a Expr<'a>>, +) -> Result<(), minijinja::Error> { + if let Some(expr) = expr { + find_in_expr(context, expr)?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use minijinja::{machinery::WhitespaceConfig, syntax::SyntaxConfig}; + + use super::*; + + #[test] + fn test_find_keys() { + let mut context = Context::new("t".to_owned()); + let templates = [ + ("hello.txt", r#"Hello {{ t("world") }}"#), + ("existing.txt", r#"{{ t("hello") }}"#), + ("plural.txt", r#"{{ t("plural", count=4) }}"#), + // Kitchen sink to make sure we're going through the whole AST + ( + "macros.txt", + r#" + {% macro test(arg="foo") %} + {% if function() == foo is test(t("nested.1")) %} + {% set foo = t("nested.2", arg=5 + 2) ~ "foo" in test %} + {{ foo | bar }} + {% else %} + {% for i in [t("nested.3", extra=t("nested.4")), "foo"] %} + {{ i | foo }} + {% else %} + {{ t("nested.5") }} + {% endfor %} + {% endif %} + {% endmacro %} + "#, + ), + ( + "nested.txt", + r#" + {% import "macros.txt" as macros %} + {% block test %} + {% filter upper %} + {{ macros.test(arg=t("nested.6")) }} + {% endfilter %} + {% endblock test %} + "#, + ), + ]; + + for (name, content) in templates { + let ast = parse( + content, + name, + SyntaxConfig::default(), + WhitespaceConfig::default(), + ) + .unwrap(); + find_in_stmt(&mut context, &ast).unwrap(); + } + + let mut tree = serde_json::from_value(serde_json::json!({ + "hello": "Hello!", + })) + .unwrap(); + + context.add_missing(&mut tree); + let tree = serde_json::to_value(&tree).unwrap(); + assert_eq!( + tree, + serde_json::json!({ + "hello": "Hello!", + "world": "", + "plural": { + "other": "" + }, + "nested": { + "1": "", + "2": "", + "3": "", + "4": "", + "5": "", + "6": "", + }, + }) + ); + } + + #[test] + fn test_invalid_key_not_string() { + // This is invalid because the key is not a string + let 
mut context = Context::new("t".to_owned()); + let ast = parse( + r"{{ t(5) }}", + "invalid.txt", + SyntaxConfig::default(), + WhitespaceConfig::default(), + ) + .unwrap(); + + let res = find_in_stmt(&mut context, &ast); + assert!(res.is_err()); + } + + #[test] + fn test_invalid_key_filtered() { + // This is invalid because the key argument has a filter + let mut context = Context::new("t".to_owned()); + let ast = parse( + r#"{{ t("foo" | bar) }}"#, + "invalid.txt", + SyntaxConfig::default(), + WhitespaceConfig::default(), + ) + .unwrap(); + + let res = find_in_stmt(&mut context, &ast); + assert!(res.is_err()); + } + + #[test] + fn test_invalid_key_missing() { + // This is invalid because the key argument is missing + let mut context = Context::new("t".to_owned()); + let ast = parse( + r"{{ t() }}", + "invalid.txt", + SyntaxConfig::default(), + WhitespaceConfig::default(), + ) + .unwrap(); + + let res = find_in_stmt(&mut context, &ast); + assert!(res.is_err()); + } + + #[test] + fn test_invalid_key_negated() { + // This is invalid because the key argument is missing + let mut context = Context::new("t".to_owned()); + let ast = parse( + r#"{{ t(not "foo") }}"#, + "invalid.txt", + SyntaxConfig::default(), + WhitespaceConfig::default(), + ) + .unwrap(); + + let res = find_in_stmt(&mut context, &ast); + assert!(res.is_err()); + } +} diff --git a/matrix-authentication-service/crates/i18n/Cargo.toml b/matrix-authentication-service/crates/i18n/Cargo.toml new file mode 100644 index 00000000..8d04dc73 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/Cargo.toml @@ -0,0 +1,35 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-i18n" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +camino.workspace = true +icu_calendar.workspace = true +icu_datetime.workspace = true +icu_experimental.workspace = true +icu_locid_transform.workspace = true +icu_locid.workspace = true +icu_plurals.workspace = true +icu_provider_adapters.workspace = true +icu_provider.workspace = true +pad.workspace = true +pest_derive.workspace = true +pest.workspace = true +serde_json.workspace = true +serde.workspace = true +thiserror.workspace = true +writeable.workspace = true diff --git a/matrix-authentication-service/crates/i18n/src/lib.rs b/matrix-authentication-service/crates/i18n/src/lib.rs new file mode 100644 index 00000000..a4dfc7e2 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/lib.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub mod sprintf; +pub mod translations; +mod translator; + +pub use icu_calendar; +pub use icu_datetime; +pub use icu_locid::locale; +pub use icu_provider::{DataError, DataLocale}; + +pub use self::{ + sprintf::{Argument, ArgumentList, Message}, + translator::{LoadError, Translator}, +}; diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/argument.rs b/matrix-authentication-service/crates/i18n/src/sprintf/argument.rs new file mode 100644 index 00000000..c83ac0e2 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/argument.rs @@ -0,0 +1,143 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use serde_json::Value; + +/// A list of arguments that can be accessed by index or name. +#[derive(Debug, Clone, Default)] +pub struct List { + arguments: Vec, + name_index: HashMap, +} + +impl List { + /// Get an argument by its index. + #[must_use] + pub fn get_by_index(&self, index: usize) -> Option<&Value> { + self.arguments.get(index) + } + + /// Get an argument by its name. + #[must_use] + pub fn get_by_name(&self, name: &str) -> Option<&Value> { + self.name_index + .get(name) + .and_then(|index| self.get_by_index(*index)) + } +} + +impl> FromIterator for List { + fn from_iter>(iter: T) -> Self { + let mut arguments = Vec::new(); + let mut name_index = HashMap::new(); + + for (index, argument) in iter.into_iter().enumerate() { + let argument = argument.into(); + if let Some(name) = argument.name { + name_index.insert(name.clone(), index); + } + + arguments.push(argument.value); + } + + Self { + arguments, + name_index, + } + } +} + +/// A single argument value. +pub struct Argument { + name: Option, + value: Value, +} + +impl From for Argument { + fn from(value: Value) -> Self { + Self { name: None, value } + } +} + +impl From<(&str, Value)> for Argument { + fn from((name, value): (&str, Value)) -> Self { + Self { + name: Some(name.to_owned()), + value, + } + } +} + +impl From<(String, Value)> for Argument { + fn from((name, value): (String, Value)) -> Self { + Self { + name: Some(name), + value, + } + } +} + +impl Argument { + /// Create a new argument with the given name and value. + #[must_use] + pub fn named(name: String, value: Value) -> Self { + Self { + name: Some(name), + value, + } + } + + /// Set the name of the argument. 
+ #[must_use] + pub fn with_name(mut self, name: String) -> Self { + self.name = Some(name); + self + } +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + + #[test] + fn test_argument_list() { + let list = List::from_iter([ + ("hello", json!("world")), + ("alice", json!(null)), + ("bob", json!(42)), + ]); + + assert_eq!(list.get_by_index(0), Some(&json!("world"))); + assert_eq!(list.get_by_index(1), Some(&json!(null))); + assert_eq!(list.get_by_index(2), Some(&json!(42))); + assert_eq!(list.get_by_index(3), None); + + assert_eq!(list.get_by_name("hello"), Some(&json!("world"))); + assert_eq!(list.get_by_name("alice"), Some(&json!(null))); + assert_eq!(list.get_by_name("bob"), Some(&json!(42))); + assert_eq!(list.get_by_name("charlie"), None); + + let list = List::from_iter([ + Argument::from(json!("hello")), + Argument::named("alice".to_owned(), json!(null)), + Argument::named("bob".to_owned(), json!(42)), + ]); + + assert_eq!(list.get_by_index(0), Some(&json!("hello"))); + assert_eq!(list.get_by_index(1), Some(&json!(null))); + assert_eq!(list.get_by_index(2), Some(&json!(42))); + assert_eq!(list.get_by_index(3), None); + + assert_eq!(list.get_by_name("hello"), None); + assert_eq!(list.get_by_name("alice"), Some(&json!(null))); + assert_eq!(list.get_by_name("bob"), Some(&json!(42))); + assert_eq!(list.get_by_name("charlie"), None); + } +} diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/formatter.rs b/matrix-authentication-service/crates/i18n/src/sprintf/formatter.rs new file mode 100644 index 00000000..29c71f6a --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/formatter.rs @@ -0,0 +1,598 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::fmt::Formatter; + +use pad::{Alignment, PadStr}; +use serde::Serialize; +use serde_json::{Value, ser::PrettyFormatter}; +use thiserror::Error; + +use super::{ArgumentList, Message}; +use crate::sprintf::message::{ + ArgumentReference, PaddingSpecifier, Part, Placeholder, TypeSpecifier, +}; + +macro_rules! format_placeholder { + ($value:expr, $type:literal, $placeholder:expr) => { + format_step_plus_sign!($value, $type, $placeholder, "",) + }; + ($value:expr, $placeholder:expr) => { + format_placeholder!($value, "", $placeholder) + }; +} + +macro_rules! format_step_plus_sign { + ($value:expr, $type:literal, $placeholder:expr, $modifiers:expr, $($argk:ident = $argv:expr),* $(,)?) => {{ + if $placeholder.plus_sign { + format_step_zero!( + $value, + $type, + $placeholder, + concat!($modifiers, "+"), + $($argk = $argv),* + ) + } else { + format_step_zero!( + $value, + $type, + $placeholder, + $modifiers, + $($argk = $argv),* + ) + } + }}; +} + +macro_rules! format_step_zero { + ($value:expr, $type:literal, $placeholder:expr, $modifiers:expr, $($argk:ident = $argv:expr),* $(,)?) => {{ + if $placeholder.padding_specifier_is_zero() { + format_step_width!( + $value, + $type, + $placeholder, + concat!($modifiers, "0"), + $($argk = $argv),* + ) + } else { + format_step_width!( + $value, + $type, + $placeholder, + $modifiers, + $($argk = $argv),* + ) + } + }}; +} + +macro_rules! format_step_width { + ($value:expr, $type:literal, $placeholder:expr, $modifiers:expr, $($argk:ident = $argv:expr),* $(,)?) => {{ + if let Some(width) = $placeholder.numeric_width() { + format_step_precision!( + $value, + $type, + $placeholder, + concat!($modifiers, "width$"), + width = width, + $($argk = $argv),* + ) + } else { + format_step_precision!( + $value, + $type, + $placeholder, + $modifiers, + $($argk = $argv),* + ) + } + }}; +} + +macro_rules! format_step_precision { + ($value:expr, $type:literal, $placeholder:expr, $modifiers:expr, $($argk:ident = $argv:expr),* $(,)?) 
=> {{ + if let Some(precision) = $placeholder.precision { + format_end!( + $value, + $type, + $placeholder, + concat!($modifiers, ".precision$"), + precision = precision, + $($argk = $argv),* + ) + } else { + format_end!( + $value, + $type, + $placeholder, + $modifiers, + $($argk = $argv),* + ) + } + }}; +} + +macro_rules! format_end { + ($value:expr, $type:literal, $placeholder:expr, $modifiers:expr, $($argk:ident = $argv:expr),* $(,)?) => { + format!(concat!("{value:", $modifiers, $type, "}"), value = $value, $($argk = $argv),*) + }; +} + +#[derive(Debug)] +pub enum ValueType { + String, + Number, + Float, + Null, + Bool, + Array, + Object, +} + +impl ValueType { + fn of_value(value: &Value) -> Self { + match value { + Value::String(_) => Self::String, + Value::Number(_) => Self::Number, + Value::Null => Self::Null, + Value::Bool(_) => Self::Bool, + Value::Array(_) => Self::Array, + Value::Object(_) => Self::Object, + } + } +} + +#[derive(Debug, Error)] +pub enum FormatError { + #[error("Can't format a {value_type:?} as a %{type_specifier}")] + InvalidTypeSpecifier { + type_specifier: TypeSpecifier, + value_type: ValueType, + }, + + #[error("Unsupported type specifier %{type_specifier}")] + UnsupportedTypeSpecifier { type_specifier: TypeSpecifier }, + + #[error("Unexpected number type")] + NumberIsNotANumber, + + #[error("Unknown named argument {name}")] + UnknownNamedArgument { name: String }, + + #[error("Unknown indexed argument {index}")] + UnknownIndexedArgument { index: usize }, + + #[error("Not enough arguments")] + NotEnoughArguments, + + #[error("Can't serialize value")] + Serialize(#[from] serde_json::Error), + + #[error("Can't convert value to UTF-8")] + InvalidUtf8(#[from] std::string::FromUtf8Error), +} + +fn find_value<'a>( + arguments: &'a ArgumentList, + requested_argument: Option<&ArgumentReference>, + current_index: usize, +) -> Result<&'a Value, FormatError> { + match requested_argument { + Some(ArgumentReference::Named(name)) => arguments + 
.get_by_name(name) + .ok_or(FormatError::UnknownNamedArgument { name: name.clone() }), + + Some(ArgumentReference::Indexed(index)) => arguments + .get_by_index(*index - 1) + .ok_or(FormatError::UnknownIndexedArgument { index: *index }), + + None => arguments + .get_by_index(current_index) + .ok_or(FormatError::NotEnoughArguments), + } +} + +/// An approximation of JS's Number.prototype.toPrecision +fn to_precision(number: f64, mut placeholder: Placeholder) -> String { + // If the precision is not set, then we just format the number as normal + let Some(precision) = placeholder.precision else { + return format_placeholder!(number, &placeholder); + }; + + // This treats NaN, Infinity, -Infinity and zero without any special handling + if !number.is_normal() { + return format_placeholder!(number, &placeholder); + } + + // This tells us how many numbers are before the decimal point + // This lossy cast is fine because we only care about the order of magnitude, + // and special cases are handled above + #[allow(clippy::cast_possible_truncation)] + let log10 = number.abs().log10().floor() as i64; + let precision_i64 = precision.try_into().unwrap_or(i64::MAX); + // We can fit the number in the precision, so we just format it as normal + if log10 > 0 && log10 <= precision_i64 || number.abs() < 10.0 { + // We remove the number of digits before the decimal point from the precision + placeholder.precision = Some(precision - 1 - log10.try_into().unwrap_or(0usize)); + format_placeholder!(number, &placeholder) + } else { + // Else in scientific notation there is always one digit before the decimal + // point + placeholder.precision = Some(precision - 1); + format_placeholder!(number, "e", &placeholder) + } +} + +#[allow(clippy::match_same_arms)] +fn format_value(value: &Value, placeholder: &Placeholder) -> Result { + match (value, &placeholder.type_specifier) { + (Value::Number(number), ts @ TypeSpecifier::BinaryNumber) => { + if let Some(number) = number.as_u64() { + 
Ok(format_placeholder!(number, "b", placeholder)) + } else if let Some(number) = number.as_i64() { + Ok(format_placeholder!(number, "b", placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ TypeSpecifier::BinaryNumber) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::String(string), TypeSpecifier::CharacterAsciiValue) if string.len() == 1 => { + Ok(format_placeholder!(string, placeholder)) + } + (Value::Number(n), TypeSpecifier::CharacterAsciiValue) => { + if let Some(character) = n + .as_u64() + .and_then(|n| u32::try_from(n).ok()) + .and_then(|n| char::try_from(n).ok()) + { + Ok(format_placeholder!(character, placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: TypeSpecifier::CharacterAsciiValue, + value_type: ValueType::Number, + }) + } + } + (v, ts @ TypeSpecifier::CharacterAsciiValue) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + ( + Value::Number(number), + ts @ (TypeSpecifier::DecimalNumber | TypeSpecifier::IntegerNumber), + ) => { + if let Some(number) = number.as_u64() { + Ok(format_placeholder!(number, placeholder)) + } else if let Some(number) = number.as_i64() { + Ok(format_placeholder!(number, placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ (TypeSpecifier::DecimalNumber | TypeSpecifier::IntegerNumber)) => { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }) + } + + (Value::Number(number), ts @ TypeSpecifier::UnsignedDecimalNumber) => { + if let Some(number) = number.as_u64() { + Ok(format_placeholder!(number, placeholder)) + } else if let Some(number) = number.as_i64() { + // Truncate to a i32 and then u32 to mimic JS's behaviour + 
#[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + let number = number as i32 as u32; + Ok(format_placeholder!(number, placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ TypeSpecifier::UnsignedDecimalNumber) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::Number(number), TypeSpecifier::ScientificNotation) => { + if let Some(number) = number.as_u64() { + Ok(format_placeholder!(number, "e", placeholder)) + } else if let Some(number) = number.as_i64() { + Ok(format_placeholder!(number, "e", placeholder)) + } else if let Some(number) = number.as_f64() { + Ok(format_placeholder!(number, "e", placeholder)) + } else { + // This should never happen + Err(FormatError::NumberIsNotANumber) + } + } + (v, ts @ TypeSpecifier::ScientificNotation) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::Number(number), TypeSpecifier::FloatingPointNumber) => { + if let Some(number) = number.as_u64() { + Ok(format_placeholder!(number, placeholder)) + } else if let Some(number) = number.as_i64() { + Ok(format_placeholder!(number, placeholder)) + } else if let Some(number) = number.as_f64() { + Ok(format_placeholder!(number, placeholder)) + } else { + // This should never happen + Err(FormatError::NumberIsNotANumber) + } + } + (v, ts @ TypeSpecifier::FloatingPointNumber) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::Number(number), TypeSpecifier::FloatingPointNumberWithSignificantDigits) => { + if let Some(number) = number.as_f64() { + Ok(to_precision(number, placeholder.clone())) + } else { + // This might happen if the integer is too big to be represented as a f64 + Err(FormatError::NumberIsNotANumber) + } + } + (v, ts @ 
TypeSpecifier::FloatingPointNumberWithSignificantDigits) => { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }) + } + + (Value::Number(number), ts @ TypeSpecifier::OctalNumber) => { + if let Some(number) = number.as_u64() { + Ok(format_placeholder!(number, "o", placeholder)) + } else if let Some(number) = number.as_i64() { + // Truncate to a i32 and then u32 to mimic JS's behaviour + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + let number = number as i32 as u32; + Ok(format_placeholder!(number, "o", placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ TypeSpecifier::OctalNumber) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::String(string), TypeSpecifier::String) => { + Ok(format_placeholder!(string, placeholder)) + } + (Value::Number(number), TypeSpecifier::String) => { + let string = format!("{number}"); + Ok(format_placeholder!(string, placeholder)) + } + (v, ts @ TypeSpecifier::String) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (Value::Bool(boolean), TypeSpecifier::TrueOrFalse) => { + Ok(format_placeholder!(boolean, placeholder)) + } + (v, ts @ TypeSpecifier::TrueOrFalse) => Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }), + + (v, TypeSpecifier::TypeOfArgument) => match v { + Value::String(_) => Ok("string".to_owned()), + Value::Number(_) => Ok("number".to_owned()), + Value::Null => Ok("null".to_owned()), + Value::Bool(_) => Ok("boolean".to_owned()), + Value::Array(_) => Ok("array".to_owned()), + Value::Object(_) => Ok("object".to_owned()), + }, + + // Unimplemented + (_v, TypeSpecifier::PrimitiveValue) => Err(FormatError::UnsupportedTypeSpecifier { + type_specifier: 
placeholder.type_specifier, + }), + + (Value::Number(n), ts @ TypeSpecifier::HexadecimalNumberLowercase) => { + if let Some(number) = n.as_u64() { + Ok(format_placeholder!(number, "x", placeholder)) + } else if let Some(number) = n.as_i64() { + // Truncate to a i32 and then u32 to mimic JS's behaviour + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + let number = number as i32 as u32; + Ok(format_placeholder!(number, "x", placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ TypeSpecifier::HexadecimalNumberLowercase) => { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }) + } + + (Value::Number(n), ts @ TypeSpecifier::HexadecimalNumberUppercase) => { + if let Some(number) = n.as_u64() { + Ok(format_placeholder!(number, "X", placeholder)) + } else if let Some(number) = n.as_i64() { + // Truncate to a i32 and then u32 to mimic JS's behaviour + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + let number = number as i32 as u32; + Ok(format_placeholder!(number, "X", placeholder)) + } else { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::Float, + }) + } + } + (v, ts @ TypeSpecifier::HexadecimalNumberUppercase) => { + Err(FormatError::InvalidTypeSpecifier { + type_specifier: *ts, + value_type: ValueType::of_value(v), + }) + } + + (value, TypeSpecifier::Json) => { + let mut json = Vec::new(); + if let Some(width) = placeholder.width { + let indent = b" ".repeat(width); + let mut serializer = serde_json::Serializer::with_formatter( + &mut json, + PrettyFormatter::with_indent(indent.as_slice()), + ); + value.serialize(&mut serializer)?; + } else { + let mut serializer = serde_json::Serializer::new(&mut json); + value.serialize(&mut serializer)?; + } + let json = String::from_utf8(json)?; + Ok(format_placeholder!(json, placeholder)) + } + } +} + 
+pub enum FormattedMessagePart<'a> { + /// A literal text part of the message. It should not be escaped. + Text(&'a str), + /// A placeholder part of the message. It should be escaped. + Placeholder(String), +} + +impl FormattedMessagePart<'_> { + fn len(&self) -> usize { + match self { + FormattedMessagePart::Text(text) => text.len(), + FormattedMessagePart::Placeholder(placeholder) => placeholder.len(), + } + } +} + +impl std::fmt::Display for FormattedMessagePart<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + FormattedMessagePart::Text(text) => write!(f, "{text}"), + FormattedMessagePart::Placeholder(placeholder) => write!(f, "{placeholder}"), + } + } +} + +pub struct FormattedMessage<'a> { + parts: Vec>, + total_len: usize, +} + +impl FormattedMessage<'_> { + /// Returns the length of the formatted message (not the number of parts). + #[must_use] + pub fn len(&self) -> usize { + self.total_len + } + + /// Returns `true` if the formatted message is empty. + #[must_use] + pub fn is_empty(&self) -> bool { + self.total_len == 0 + } + + /// Returns the list of parts of the formatted message. + #[must_use] + pub fn parts(&self) -> &[FormattedMessagePart<'_>] { + &self.parts + } +} + +impl std::fmt::Display for FormattedMessage<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + for part in &self.parts { + write!(f, "{part}")?; + } + Ok(()) + } +} + +impl Message { + /// Format the message with the given arguments. + /// + /// # Errors + /// + /// Returns an error if the message can't be formatted with the given + /// arguments. 
+ pub fn format(&self, arguments: &ArgumentList) -> Result { + self.format_(arguments).map(|fm| fm.to_string()) + } + + #[doc(hidden)] + pub fn format_(&self, arguments: &ArgumentList) -> Result, FormatError> { + let mut parts = Vec::with_capacity(self.parts().len()); + + // Holds the current index of the placeholder we are formatting, which is used + // by non-named, non-indexed placeholders + let mut current_placeholder = 0usize; + // Compute the total length of the formatted message + let mut total_len = 0usize; + for part in self.parts() { + let formatted = match part { + Part::Percent => FormattedMessagePart::Text("%"), + Part::Text(text) => FormattedMessagePart::Text(text), + Part::Placeholder(placeholder) => { + let value = find_value( + arguments, + placeholder.requested_argument.as_ref(), + current_placeholder, + )?; + + let formatted = format_value(value, placeholder)?; + + // Do the extra padding which std::fmt can't really do + let formatted = if let Some(width) = placeholder.width { + let spacer = placeholder + .padding_specifier + .map_or(' ', PaddingSpecifier::char); + + let alignment = if placeholder.left_align { + Alignment::Left + } else { + Alignment::Right + }; + + formatted.pad(width, spacer, alignment, false) + } else { + formatted + }; + + current_placeholder += 1; + FormattedMessagePart::Placeholder(formatted) + } + }; + total_len += formatted.len(); + parts.push(formatted); + } + + Ok(FormattedMessage { parts, total_len }) + } +} diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/grammar.pest b/matrix-authentication-service/crates/i18n/src/sprintf/grammar.pest new file mode 100644 index 00000000..6d33ad7f --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/grammar.pest @@ -0,0 +1,75 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +message = _{ (text | percent | placeholder)* ~ EOI } + +// # Format specification: +// +// The placeholders in the format string are marked by % and are followed by one or more of these elements, in this order: +// +// - An optional number followed by a $ sign that selects which argument index to use for the value. If not specified, +// arguments will be placed in the same order as the placeholders in the input string. +arg_indexed = { !"0" ~ number ~ "$" } +// TODO: the named argument can be a complex expression +arg_named = { "(" ~ ident ~ ")" } + +// - An optional + sign that forces to precede the result with a plus or minus sign on numeric values. +// By default, only the - sign is used on negative numbers. +plus_sign = { "+" } + +// - An optional padding specifier that says what character to use for padding (if specified). +// Possible values are 0 or any other character preceded by a ' (single quote). The default is to pad with spaces. +padding_specifier = { "0" | "'" ~ ANY } + +// - An optional - sign, that causes sprintf to left-align the result of this placeholder. +// The default is to right-align the result. +left_align = { "-" } + +// - An optional number, that says how many characters the result should have. +// If the value to be returned is shorter than this number, the result will be padded. +// When used with the j (JSON) type specifier, the padding length specifies the tab size used for indentation. +width = { number } + +// - An optional precision modifier, consisting of a . (dot) followed by a number, that says how many digits should be +// displayed for floating point numbers. When used with the g type specifier, it specifies the number of significant +// digits. When used on a string, it causes the result to be truncated. +precision = { "." 
~ number } + +// - A type specifier that can be any of: +// - % — yields a literal % character -- This is handled separately +// - b — yields an integer as a binary number +// - c — yields an integer as the character with that ASCII value +// - d or i — yields an integer as a signed decimal number +// - e — yields a float using scientific notation +// - u — yields an integer as an unsigned decimal number +// - f — yields a float as is; see notes on precision above +// - g — yields a float as is; see notes on precision above +// - o — yields an integer as an octal number +// - s — yields a string as is +// - t — yields true or false +// - T — yields the type of the argument +// - v — yields the primitive value of the specified argument +// - x — yields an integer as a hexadecimal number (lower-case) +// - X — yields an integer as a hexadecimal number (upper-case) +// - j — yields a JavaScript object or array as a JSON encoded string +type_specifier = { "b" | "c" | "d" | "i" | "e" | "u" | "f" | "g" | "o" | "s" | "t" | "T" | "v" | "x" | "X" | "j" } + +percent = @{ start ~ start } +placeholder = { + start ~ + (arg_indexed | arg_named)? ~ + plus_sign? ~ + padding_specifier? ~ + left_align? ~ + width? ~ + precision? ~ + type_specifier +} +text = @{ (!start ~ ANY)+ } + +start = _{ "%" } +number = @{ ASCII_NONZERO_DIGIT ~ ASCII_DIGIT* } +ident = @{ (ASCII_ALPHA | "_") ~ (ASCII_ALPHANUMERIC | "_")* } diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/message.rs b/matrix-authentication-service/crates/i18n/src/sprintf/message.rs new file mode 100644 index 00000000..f92041ac --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/message.rs @@ -0,0 +1,283 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use serde::{Deserialize, Serialize}; + +/// Specifies how to format an argument. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TypeSpecifier { + /// `b` + BinaryNumber, + + /// `c` + CharacterAsciiValue, + + /// `i` + DecimalNumber, + + /// `i` + IntegerNumber, + + /// `e` + ScientificNotation, + + /// `u` + UnsignedDecimalNumber, + + /// `f` + FloatingPointNumber, + + /// `g` + FloatingPointNumberWithSignificantDigits, + + /// `o` + OctalNumber, + + /// `s` + String, + + /// `t` + TrueOrFalse, + + /// `T` + TypeOfArgument, + + /// `v` + PrimitiveValue, + + /// `x` + HexadecimalNumberLowercase, + + /// `X` + HexadecimalNumberUppercase, + + /// `j` + Json, +} + +impl TypeSpecifier { + /// Returns true if the type specifier is a numeric type, which should be + /// specially formatted with the zero + const fn is_numeric(self) -> bool { + matches!( + self, + Self::BinaryNumber + | Self::DecimalNumber + | Self::IntegerNumber + | Self::ScientificNotation + | Self::UnsignedDecimalNumber + | Self::FloatingPointNumber + | Self::FloatingPointNumberWithSignificantDigits + | Self::OctalNumber + | Self::HexadecimalNumberLowercase + | Self::HexadecimalNumberUppercase + ) + } +} + +impl std::fmt::Display for TypeSpecifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let specifier = match self { + Self::BinaryNumber => 'b', + Self::CharacterAsciiValue => 'c', + Self::DecimalNumber => 'd', + Self::IntegerNumber => 'i', + Self::ScientificNotation => 'e', + Self::UnsignedDecimalNumber => 'u', + Self::FloatingPointNumber => 'f', + Self::FloatingPointNumberWithSignificantDigits => 'g', + Self::OctalNumber => 'o', + Self::String => 's', + Self::TrueOrFalse => 't', + Self::TypeOfArgument => 'T', + Self::PrimitiveValue => 'v', + Self::HexadecimalNumberLowercase => 'x', + Self::HexadecimalNumberUppercase => 'X', + Self::Json => 'j', + }; + write!(f, "{specifier}") + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ArgumentReference { + 
Indexed(usize), + Named(String), +} + +impl std::fmt::Display for ArgumentReference { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ArgumentReference::Indexed(index) => write!(f, "{index}$"), + ArgumentReference::Named(name) => write!(f, "({name})"), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum PaddingSpecifier { + Zero, + Char(char), +} + +impl std::fmt::Display for PaddingSpecifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + PaddingSpecifier::Zero => write!(f, "0"), + PaddingSpecifier::Char(c) => write!(f, "'{c}"), + } + } +} + +impl PaddingSpecifier { + pub fn char(self) -> char { + match self { + PaddingSpecifier::Zero => '0', + PaddingSpecifier::Char(c) => c, + } + } + + pub const fn is_zero(self) -> bool { + match self { + PaddingSpecifier::Zero => true, + PaddingSpecifier::Char(_) => false, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Placeholder { + pub type_specifier: TypeSpecifier, + pub requested_argument: Option, + pub plus_sign: bool, + pub padding_specifier: Option, + pub left_align: bool, + pub width: Option, + pub precision: Option, +} + +impl Placeholder { + pub fn padding_specifier_is_zero(&self) -> bool { + self.padding_specifier + .is_some_and(PaddingSpecifier::is_zero) + } + + /// Whether it should be formatted as a number for the width argument + pub fn numeric_width(&self) -> Option { + self.width + .filter(|_| self.padding_specifier_is_zero() && self.type_specifier.is_numeric()) + } +} + +impl std::fmt::Display for Placeholder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "%")?; + if let Some(argument) = &self.requested_argument { + write!(f, "{argument}")?; + } + + if self.plus_sign { + write!(f, "+")?; + } + + if let Some(padding_specifier) = &self.padding_specifier { + write!(f, "{padding_specifier}")?; + } + + if self.left_align { + write!(f, "-")?; + } + + if let 
Some(width) = self.width { + write!(f, "{width}")?; + } + + if let Some(precision) = self.precision { + write!(f, ".{precision}")?; + } + + write!(f, "{}", self.type_specifier) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Message { + parts: Vec, +} + +impl std::fmt::Display for Message { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for part in &self.parts { + write!(f, "{part}")?; + } + Ok(()) + } +} + +impl FromIterator for Message { + fn from_iter>(iter: T) -> Self { + Self { + parts: iter.into_iter().collect(), + } + } +} + +impl Message { + pub(crate) fn parts(&self) -> std::slice::Iter<'_, Part> { + self.parts.iter() + } + + /// Create a message from a literal string, without any placeholders. + #[must_use] + pub fn from_literal(literal: String) -> Message { + Message { + parts: vec![Part::Text(literal)], + } + } +} + +impl Serialize for Message { + fn serialize(&self, serializer: S) -> Result { + let string = self.to_string(); + serializer.serialize_str(&string) + } +} + +impl<'de> Deserialize<'de> for Message { + fn deserialize>(deserializer: D) -> Result { + let string = String::deserialize(deserializer)?; + string.parse().map_err(serde::de::Error::custom) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) enum Part { + Percent, + Text(String), + Placeholder(Placeholder), +} + +impl From for Part { + fn from(placeholder: Placeholder) -> Self { + Self::Placeholder(placeholder) + } +} + +impl From for Part { + fn from(text: String) -> Self { + Self::Text(text) + } +} + +impl std::fmt::Display for Part { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Part::Percent => write!(f, "%%"), + Part::Text(text) => write!(f, "{text}"), + Part::Placeholder(placeholder) => write!(f, "{placeholder}"), + } + } +} diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/mod.rs b/matrix-authentication-service/crates/i18n/src/sprintf/mod.rs new file mode 100644 index 
00000000..d58514f9 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/mod.rs @@ -0,0 +1,209 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(unused_macros)] + +mod argument; +mod formatter; +mod message; +mod parser; + +pub use self::{ + argument::{Argument, List as ArgumentList}, + formatter::{FormatError, FormattedMessage, FormattedMessagePart}, + message::Message, +}; + +macro_rules! arg_list_inner { + ($var:ident |) => { }; + ($var:ident | $name:ident = $($arg:expr)*, $($rest:tt)*) => {{ + $var.push($crate::sprintf::Argument::from((stringify!($name), ::serde_json::json!($($arg)*)))); + $crate::sprintf::arg_list_inner!($var | $($rest)* ); + }}; + ($var:ident | $name:ident = $($arg:expr)*) => {{ + $var.push($crate::sprintf::Argument::from((stringify!($name), ::serde_json::json!($($arg)*)))); + }}; + ($var:ident | $($arg:expr)*, $($rest:tt)*) => {{ + $var.push($crate::sprintf::Argument::from(::serde_json::json!($($arg)*))); + $crate::sprintf::arg_list_inner!($var | $($rest)* ); + }}; + ($var:ident | $($arg:expr)*) => {{ + $var.push($crate::sprintf::Argument::from(::serde_json::json!($($arg)*))); + }}; +} + +macro_rules! arg_list { + ($($args:tt)*) => {{ + let mut __args = Vec::<$crate::sprintf::Argument>::new(); + $crate::sprintf::arg_list_inner!(__args | $($args)* ); + $crate::sprintf::ArgumentList::from_iter(__args) + }} +} + +macro_rules! 
sprintf { + ($message:literal) => {{ + <$crate::sprintf::Message as ::std::str::FromStr>::from_str($message) + .map_err($crate::sprintf::Error::from) + .and_then(|message| { + let __args = $crate::sprintf::ArgumentList::default(); + message.format(&__args).map_err($crate::sprintf::Error::from) + }) + }}; + + ($message:literal, $($args:tt)*) => {{ + <$crate::sprintf::Message as ::std::str::FromStr>::from_str($message) + .map_err($crate::sprintf::Error::from) + .and_then(|message| { + let __args = $crate::sprintf::arg_list!($($args)*); + message.format(&__args).map_err($crate::sprintf::Error::from) + }) + }}; +} + +#[allow(unused_imports)] +pub(crate) use arg_list; +#[allow(unused_imports)] +pub(crate) use arg_list_inner; +#[allow(unused_imports)] +pub(crate) use sprintf; + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +#[allow(dead_code)] +enum Error { + Format(#[from] self::formatter::FormatError), + Parse(Box), +} + +impl From for Error { + fn from(err: self::parser::Error) -> Self { + Self::Parse(Box::new(err)) + } +} + +#[cfg(test)] +mod tests { + use std::f64::consts::PI; + + #[test] + fn test_sprintf() { + let res = sprintf!("Hello, %(name)s!", name = "world").unwrap(); + assert_eq!(res, "Hello, world!"); + assert_eq!("%", sprintf!("%%").unwrap()); + assert_eq!("10", sprintf!("%b", 2).unwrap()); + assert_eq!("A", sprintf!("%c", 65).unwrap()); + assert_eq!("2", sprintf!("%d", 2).unwrap()); + assert_eq!("2", sprintf!("%i", 2).unwrap()); + //assert_eq!("2", sprintf!("%d", "2").unwrap()); -- We don't convert on the fly + //assert_eq!("2", sprintf!("%i", "2").unwrap()); -- We don't convert on the fly + assert_eq!( + r#"{"foo":"bar"}"#, + sprintf!("%j", serde_json::json!({"foo": "bar"})).unwrap() + ); + assert_eq!(r#"["foo","bar"]"#, sprintf!("%j", ["foo", "bar"]).unwrap()); + assert_eq!("2e0", sprintf!("%e", 2).unwrap()); // sprintf-js returns 2e+0 + assert_eq!("2", sprintf!("%u", 2).unwrap()); + assert_eq!("4294967294", sprintf!("%u", -2).unwrap()); + 
assert_eq!("2.2", sprintf!("%f", 2.2).unwrap()); + assert_eq!("3.141592653589793", sprintf!("%g", PI).unwrap()); + assert_eq!("10", sprintf!("%o", 8).unwrap()); + assert_eq!("37777777770", sprintf!("%o", -8).unwrap()); + assert_eq!("%s", sprintf!("%s", "%s").unwrap()); + assert_eq!("ff", sprintf!("%x", 255).unwrap()); + assert_eq!("ffffff01", sprintf!("%x", -255).unwrap()); + assert_eq!("FF", sprintf!("%X", 255).unwrap()); + assert_eq!("FFFFFF01", sprintf!("%X", -255).unwrap()); + assert_eq!( + "Polly wants a cracker", + sprintf!("%2$s %3$s a %1$s", "cracker", "Polly", "wants").unwrap() + ); + assert_eq!( + "Hello world!", + sprintf!("Hello %(who)s!", who = "world").unwrap() + ); + + assert_eq!("true", sprintf!("%t", true).unwrap()); + assert_eq!("t", sprintf!("%.1t", true).unwrap()); + // We don't implement truthiness + //assert_eq!("true", sprintf!("%t", "true").unwrap()); + //assert_eq!("true", sprintf!("%t", 1).unwrap()); + assert_eq!("false", sprintf!("%t", false).unwrap()); + assert_eq!("f", sprintf!("%.1t", false).unwrap()); + //assert_eq!("false", sprintf!("%t", "").unwrap()); + //assert_eq!("false", sprintf!("%t", 0).unwrap()); + + assert_eq!("null", sprintf!("%T", serde_json::json!(null)).unwrap()); + assert_eq!("boolean", sprintf!("%T", true).unwrap()); + assert_eq!("number", sprintf!("%T", 42).unwrap()); + assert_eq!("string", sprintf!("%T", "This is a string").unwrap()); + assert_eq!("array", sprintf!("%T", [1, 2, 3]).unwrap()); + assert_eq!( + "object", + sprintf!("%T", serde_json::json!({"foo": "bar"})).unwrap() + ); + } + + #[test] + fn test_complex() { + // sign + assert_eq!("2", sprintf!("%d", 2).unwrap()); + assert_eq!("-2", sprintf!("%d", -2).unwrap()); + assert_eq!("+2", sprintf!("%+d", 2).unwrap()); + assert_eq!("-2", sprintf!("%+d", -2).unwrap()); + assert_eq!("2", sprintf!("%i", 2).unwrap()); + assert_eq!("-2", sprintf!("%i", -2).unwrap()); + assert_eq!("+2", sprintf!("%+i", 2).unwrap()); + assert_eq!("-2", sprintf!("%+i", -2).unwrap()); + 
assert_eq!("2.2", sprintf!("%f", 2.2).unwrap()); + assert_eq!("-2.2", sprintf!("%f", -2.2).unwrap()); + assert_eq!("+2.2", sprintf!("%+f", 2.2).unwrap()); + assert_eq!("-2.2", sprintf!("%+f", -2.2).unwrap()); + assert_eq!("-2.3", sprintf!("%+.1f", -2.34).unwrap()); + assert_eq!("-0.0", sprintf!("%+.1f", -0.01).unwrap()); + + assert_eq!("3.14159", sprintf!("%.6g", PI).unwrap()); + assert_eq!("3.14", sprintf!("%.3g", PI).unwrap()); + assert_eq!("3", sprintf!("%.1g", PI).unwrap()); + assert_eq!("3e5", sprintf!("%.1g", 300_000.0).unwrap()); + assert_eq!("300", sprintf!("%.3g", 300).unwrap()); + + assert_eq!("-000000123", sprintf!("%+010d", -123).unwrap()); + assert_eq!("______-123", sprintf!("%+'_10d", -123).unwrap()); + assert_eq!("-234.34 123.2", sprintf!("%f %f", -234.34, 123.2).unwrap()); + + // padding + assert_eq!("-0002", sprintf!("%05d", -2).unwrap()); + assert_eq!("-0002", sprintf!("%05i", -2).unwrap()); + assert_eq!(" <", sprintf!("%5s", "<").unwrap()); + assert_eq!("0000<", sprintf!("%05s", "<").unwrap()); + assert_eq!("____<", sprintf!("%'_5s", "<").unwrap()); + assert_eq!("> ", sprintf!("%-5s", ">").unwrap()); + assert_eq!(">0000", sprintf!("%0-5s", ">").unwrap()); + assert_eq!(">____", sprintf!("%'_-5s", ">").unwrap()); + assert_eq!("xxxxxx", sprintf!("%5s", "xxxxxx").unwrap()); + assert_eq!("1234", sprintf!("%02u", 1234).unwrap()); + assert_eq!(" -10.235", sprintf!("%8.3f", -10.23456).unwrap()); + assert_eq!("-12.34 xxx", sprintf!("%f %s", -12.34, "xxx").unwrap()); + assert_eq!( + r#"{ + "foo": "bar" +}"#, + sprintf!("%2j", serde_json::json!({"foo": "bar"})).unwrap() + ); + assert_eq!( + r#"[ + "foo", + "bar" +]"#, + sprintf!("%2j", ["foo", "bar"]).unwrap() + ); + + // precision + assert_eq!("2.3", sprintf!("%.1f", 2.345).unwrap()); + assert_eq!("xxxxx", sprintf!("%5.5s", "xxxxxx").unwrap()); + assert_eq!(" x", sprintf!("%5.1s", "xxxxxx").unwrap()); + } +} diff --git a/matrix-authentication-service/crates/i18n/src/sprintf/parser.rs 
b/matrix-authentication-service/crates/i18n/src/sprintf/parser.rs new file mode 100644 index 00000000..ab5aaf87 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/sprintf/parser.rs @@ -0,0 +1,337 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::result_large_err)] + +use std::str::FromStr; + +use pest::{Parser, Span, error::ErrorVariant, iterators::Pair}; + +use super::message::{ + ArgumentReference, Message, PaddingSpecifier, Part, Placeholder, TypeSpecifier, +}; + +#[derive(pest_derive::Parser)] +#[grammar = "sprintf/grammar.pest"] +struct SprintfParser; + +pub type Error = pest::error::Error; +type Result = std::result::Result; + +fn unexpected_rule_error(pair: &Pair) -> Error { + Error::new_from_span( + ErrorVariant::CustomError { + message: format!("Unexpected rule: {:?}", pair.as_rule()), + }, + pair.as_span(), + ) +} + +fn ensure_end_of_pairs(pairs: &mut pest::iterators::Pairs, span: Span<'_>) -> Result<()> { + if pairs.next().is_none() { + Ok(()) + } else { + Err(Error::new_from_span( + ErrorVariant::CustomError { + message: String::from("Expected end of pairs"), + }, + span, + )) + } +} + +fn next_pair<'i>( + pairs: &mut pest::iterators::Pairs<'i, Rule>, + span: Span<'i>, +) -> Result> { + pairs.next().ok_or_else(|| { + Error::new_from_span( + ErrorVariant::CustomError { + message: String::from("Expected pair"), + }, + span, + ) + }) +} + +fn ensure_rule_type(pair: &Pair, rule: Rule) -> Result<()> { + if pair.as_rule() == rule { + Ok(()) + } else { + Err(unexpected_rule_error(pair)) + } +} + +fn interpret_ident(pair: &Pair) -> Result { + ensure_rule_type(pair, Rule::ident)?; + Ok(pair.as_str().to_owned()) +} + +fn interpret_number(pair: &Pair) -> Result { + ensure_rule_type(pair, Rule::number)?; + 
pair.as_str().parse().map_err(|e| { + Error::new_from_span( + ErrorVariant::CustomError { + message: format!("Failed to parse number: {e}"), + }, + pair.as_span(), + ) + }) +} + +fn interpret_arg_named(pair: Pair) -> Result { + ensure_rule_type(&pair, Rule::arg_named)?; + let span = pair.as_span(); + let mut pairs = pair.into_inner(); + + let ident = next_pair(&mut pairs, span)?; + let ident = interpret_ident(&ident)?; + + ensure_end_of_pairs(&mut pairs, span)?; + Ok(ArgumentReference::Named(ident)) +} + +fn interpret_arg_indexed(pair: Pair) -> Result { + ensure_rule_type(&pair, Rule::arg_indexed)?; + let span = pair.as_span(); + let mut pairs = pair.into_inner(); + + let number = next_pair(&mut pairs, span)?; + let number = interpret_number(&number)?; + + ensure_end_of_pairs(&mut pairs, span)?; + Ok(ArgumentReference::Indexed(number)) +} + +fn interpret_padding_specifier(pair: &Pair) -> Result { + ensure_rule_type(pair, Rule::padding_specifier)?; + let specifier: Vec = pair.as_str().chars().collect(); + + let specifier = match specifier[..] 
{ + ['0'] => PaddingSpecifier::Zero, + ['\'', c] => PaddingSpecifier::Char(c), + ref specifier => { + return Err(Error::new_from_span( + ErrorVariant::CustomError { + message: format!("Unexpected padding specifier: {specifier:?}"), + }, + pair.as_span(), + )); + } + }; + + Ok(specifier) +} + +fn interpret_width(pair: Pair) -> Result { + ensure_rule_type(&pair, Rule::width)?; + let span = pair.as_span(); + let mut pairs = pair.into_inner(); + + let number = next_pair(&mut pairs, span)?; + let number = interpret_number(&number)?; + + ensure_end_of_pairs(&mut pairs, span)?; + Ok(number) +} + +fn interpret_precision(pair: Pair) -> Result { + ensure_rule_type(&pair, Rule::precision)?; + let span = pair.as_span(); + let mut pairs = pair.into_inner(); + + let number = next_pair(&mut pairs, span)?; + let number = interpret_number(&number)?; + + ensure_end_of_pairs(&mut pairs, span)?; + Ok(number) +} + +fn interpret_type_specifier(pair: &Pair) -> Result { + ensure_rule_type(pair, Rule::type_specifier)?; + let specifier: Vec = pair.as_str().chars().collect(); + + let type_specifier = match specifier[..] 
{ + ['b'] => TypeSpecifier::BinaryNumber, + ['c'] => TypeSpecifier::CharacterAsciiValue, + ['d'] => TypeSpecifier::DecimalNumber, + ['i'] => TypeSpecifier::IntegerNumber, + ['e'] => TypeSpecifier::ScientificNotation, + ['u'] => TypeSpecifier::UnsignedDecimalNumber, + ['f'] => TypeSpecifier::FloatingPointNumber, + ['g'] => TypeSpecifier::FloatingPointNumberWithSignificantDigits, + ['o'] => TypeSpecifier::OctalNumber, + ['s'] => TypeSpecifier::String, + ['t'] => TypeSpecifier::TrueOrFalse, + ['T'] => TypeSpecifier::TypeOfArgument, + ['v'] => TypeSpecifier::PrimitiveValue, + ['x'] => TypeSpecifier::HexadecimalNumberLowercase, + ['X'] => TypeSpecifier::HexadecimalNumberUppercase, + ['j'] => TypeSpecifier::Json, + _ => { + return Err(Error::new_from_span( + ErrorVariant::CustomError { + message: String::from("Unexpected type specifier"), + }, + pair.as_span(), + )); + } + }; + + Ok(type_specifier) +} + +fn interpret_placeholder(pair: Pair) -> Result { + ensure_rule_type(&pair, Rule::placeholder)?; + let span = pair.as_span(); + let mut pairs = pair.into_inner(); + let mut current_pair = next_pair(&mut pairs, span)?; + + let argument = if current_pair.as_rule() == Rule::arg_named { + let argument = interpret_arg_named(current_pair)?; + current_pair = next_pair(&mut pairs, span)?; + Some(argument) + } else if current_pair.as_rule() == Rule::arg_indexed { + let argument = interpret_arg_indexed(current_pair)?; + current_pair = next_pair(&mut pairs, span)?; + Some(argument) + } else { + None + }; + + let plus_sign = if current_pair.as_rule() == Rule::plus_sign { + current_pair = next_pair(&mut pairs, span)?; + true + } else { + false + }; + + let padding_specifier = if current_pair.as_rule() == Rule::padding_specifier { + let padding_specifier = interpret_padding_specifier(¤t_pair)?; + current_pair = next_pair(&mut pairs, span)?; + Some(padding_specifier) + } else { + None + }; + + let left_align = if current_pair.as_rule() == Rule::left_align { + current_pair = 
next_pair(&mut pairs, span)?; + true + } else { + false + }; + + let width = if current_pair.as_rule() == Rule::width { + let width = interpret_width(current_pair)?; + current_pair = next_pair(&mut pairs, span)?; + Some(width) + } else { + None + }; + + let precision = if current_pair.as_rule() == Rule::precision { + let precision = interpret_precision(current_pair)?; + current_pair = next_pair(&mut pairs, span)?; + Some(precision) + } else { + None + }; + + let type_specifier = interpret_type_specifier(¤t_pair)?; + + ensure_end_of_pairs(&mut pairs, span)?; + + Ok(Placeholder { + type_specifier, + requested_argument: argument, + plus_sign, + padding_specifier, + left_align, + width, + precision, + }) +} + +impl FromStr for Message { + type Err = Error; + + fn from_str(input: &str) -> Result { + SprintfParser::parse(Rule::message, input)? + // Filter out the "end of input" rule + .filter(|pair| pair.as_rule() != Rule::EOI) + .map(|pair| match pair.as_rule() { + Rule::text => Ok(pair.as_str().to_owned().into()), + Rule::percent => Ok(Part::Percent), + Rule::placeholder => Ok(interpret_placeholder(pair)?.into()), + _ => Err(unexpected_rule_error(&pair)), + }) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parser() { + // Cases extracted from sprintf-js tests + let cases = [ + "%%", + "%'_-5s", + "%'_5s", + "%+'_10d", + "%+.1f", + "%+010d", + "%+d", + "%+f", + "%+i", + "%-5s", + "%.1f", + "%.1g", + "%.1t", + "%.3g", + "%.6g", + "%0-5s", + "%02u", + "%05d", + "%05i", + "%05s", + "%2$s %3$s a %1$s", + "%2j", + "%5.1s", + "%5.5s", + "%5s", + "%8.3f", + "%T", + "%X", + "%b", + "%c", + "%d", + "%e", + "%f", + "%f %f", + "%f %s", + "%g", + "%i", + "%j", + "%o", + "%s", + "%t", + "%u", + "%v", + "%x", + "Hello %(who)s!", + ]; + + for case in cases { + let result: Result = case.parse(); + assert!(result.is_ok(), "Failed to parse: {case}"); + let message = result.unwrap(); + assert_eq!(message.to_string(), *case); + } + } +} diff --git 
a/matrix-authentication-service/crates/i18n/src/translations.rs b/matrix-authentication-service/crates/i18n/src/translations.rs new file mode 100644 index 00000000..9bde8fa0 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/translations.rs @@ -0,0 +1,338 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + collections::{BTreeMap, BTreeSet}, + ops::Deref, +}; + +use icu_plurals::PluralCategory; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{MapAccess, Visitor}, + ser::SerializeMap, +}; + +use crate::sprintf::Message; + +fn plural_category_as_str(category: PluralCategory) -> &'static str { + match category { + PluralCategory::Zero => "zero", + PluralCategory::One => "one", + PluralCategory::Two => "two", + PluralCategory::Few => "few", + PluralCategory::Many => "many", + PluralCategory::Other => "other", + } +} + +pub type TranslationTree = Tree; + +#[derive(Debug, Clone, Deserialize, Default)] +pub struct Metadata { + #[serde(skip)] + // We don't want to deserialize it, as we're resetting it every time + // This then generates the `context` field when serializing + pub context_locations: BTreeSet, + pub description: Option, +} + +impl Serialize for Metadata { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let context = self + .context_locations + .iter() + .map(String::as_str) + .collect::>() + .join(", "); + + let mut map = serializer.serialize_map(None)?; + + if !context.is_empty() { + map.serialize_entry("context", &context)?; + } + + if let Some(description) = &self.description { + map.serialize_entry("description", description)?; + } + + map.end() + } +} + +impl Metadata { + fn add_location(&mut self, location: String) { + self.context_locations.insert(location); + } +} + 
+#[derive(Debug, Clone, Default)] +pub struct Tree { + inner: BTreeMap, +} + +#[derive(Debug, Clone)] +pub struct Node { + metadata: Option, + value: Value, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum Value { + Tree(Tree), + Leaf(Message), +} + +impl<'de> Deserialize<'de> for Tree { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct TreeVisitor; + + impl<'de> Visitor<'de> for TreeVisitor { + type Value = Tree; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("map") + } + + fn visit_map(self, mut map: A) -> Result + where + A: MapAccess<'de>, + { + let mut tree: BTreeMap = BTreeMap::new(); + let mut metadata_map: BTreeMap = BTreeMap::new(); + + while let Some(key) = map.next_key::()? { + if let Some(name) = key.strip_prefix('@') { + let metadata = map.next_value::()?; + metadata_map.insert(name.to_owned(), metadata); + } else { + let value = map.next_value::()?; + tree.insert( + key, + Node { + metadata: None, + value, + }, + ); + } + } + + for (key, meta) in metadata_map { + if let Some(node) = tree.get_mut(&key) { + node.metadata = Some(meta); + } + } + + Ok(Tree { inner: tree }) + } + } + + deserializer.deserialize_any(TreeVisitor) + } +} + +impl Serialize for Tree { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut map = serializer.serialize_map(None)?; + + for (key, value) in &self.inner { + map.serialize_entry(key, &value.value)?; + if let Some(meta) = &value.metadata { + map.serialize_entry(&format!("@{key}"), meta)?; + } + } + + map.end() + } +} + +impl Tree { + /// Get a message from the tree by key. + /// + /// Returns `None` if the requested key is not found. 
+ #[must_use] + pub fn message(&self, key: &str) -> Option<&Message> { + let keys = key.split('.'); + let node = self.walk_path(keys)?; + let message = node.value.as_message()?; + Some(message) + } + + /// Get a pluralized message from the tree by key and plural category. + /// + /// If the key doesn't have plural variants, this will return the message + /// itself. Returns the "other" category if the requested category is + /// not found. Returns `None` if the requested key is not found. + #[must_use] + pub fn pluralize(&self, key: &str, category: PluralCategory) -> Option<&Message> { + let keys = key.split('.'); + let node = self.walk_path(keys)?; + + let subtree = match &node.value { + Value::Leaf(message) => return Some(message), + Value::Tree(tree) => tree, + }; + + let node = if let Some(node) = subtree.inner.get(plural_category_as_str(category)) { + node + } else { + // Fallback to the "other" category + subtree.inner.get("other")? + }; + + let message = node.value.as_message()?; + Some(message) + } + + #[doc(hidden)] + pub fn set_if_not_defined, I: IntoIterator>( + &mut self, + path: I, + value: Message, + location: Option, + ) -> bool { + // We're temporarily moving the tree out of the struct to be able to nicely + // iterate on it + let mut fake_root = Node { + metadata: None, + value: Value::Tree(Tree { + inner: std::mem::take(&mut self.inner), + }), + }; + + let mut node = &mut fake_root; + for key in path { + match &mut node.value { + Value::Tree(tree) => { + node = tree.inner.entry(key.deref().to_owned()).or_insert(Node { + metadata: None, + value: Value::Tree(Tree::default()), + }); + } + Value::Leaf(_) => { + panic!() + } + } + } + + let replaced = match &node.value { + Value::Tree(tree) => { + assert!( + tree.inner.is_empty(), + "Trying to overwrite a non-empty tree" + ); + + node.value = Value::Leaf(value); + true + } + Value::Leaf(_) => { + // Do not overwrite existing values + false + } + }; + + if let Some(location) = location { + node.metadata 
+ .get_or_insert(Metadata::default()) + .add_location(location); + } + + // Restore the original tree at the end of the function + match fake_root { + Node { + value: Value::Tree(tree), + .. + } => self.inner = tree.inner, + _ => panic!("Tried to replace the root node"), + } + + replaced + } + + fn walk_path, I: IntoIterator>( + &self, + path: I, + ) -> Option<&Node> { + let mut iterator = path.into_iter(); + let next = iterator.next()?; + self.walk_path_inner(next, iterator) + } + + fn walk_path_inner, I: Iterator>( + &self, + next_key: K, + mut path: I, + ) -> Option<&Node> { + let next = self.inner.get(&*next_key)?; + + match path.next() { + Some(next_key) => match &next.value { + Value::Tree(tree) => tree.walk_path_inner(next_key, path), + Value::Leaf(_) => None, + }, + None => Some(next), + } + } +} + +impl Value { + fn as_message(&self) -> Option<&Message> { + match self { + Value::Leaf(message) => Some(message), + Value::Tree(_) => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::sprintf::{ArgumentList, arg_list}; + + #[test] + fn test_it_works() { + let tree = serde_json::json!({ + "hello": "world", + "damals": { + "about_x_hours_ago": { + "one": "about one hour ago", + "other": "about %(count)s hours ago" + } + } + }); + + let result: Result = serde_json::from_value(tree); + assert!(result.is_ok()); + let tree = result.unwrap(); + let message = tree.message("hello"); + assert!(message.is_some()); + let message = message.unwrap(); + assert_eq!(message.format(&ArgumentList::default()).unwrap(), "world"); + + let message = tree.message("damals.about_x_hours_ago.one"); + assert!(message.is_some()); + let message = message.unwrap(); + assert_eq!(message.format(&arg_list!()).unwrap(), "about one hour ago"); + + let message = tree.pluralize("damals.about_x_hours_ago", PluralCategory::Other); + assert!(message.is_some()); + let message = message.unwrap(); + assert_eq!( + message.format(&arg_list!(count = 2)).unwrap(), + "about 2 hours 
ago" + ); + } +} diff --git a/matrix-authentication-service/crates/i18n/src/translator.rs b/matrix-authentication-service/crates/i18n/src/translator.rs new file mode 100644 index 00000000..cd36a6c4 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/src/translator.rs @@ -0,0 +1,480 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, fs::File, io::BufReader, str::FromStr}; + +use camino::{Utf8Path, Utf8PathBuf}; +use icu_experimental::relativetime::{ + RelativeTimeFormatter, RelativeTimeFormatterOptions, options::Numeric, +}; +use icu_locid::{Locale, ParserError}; +use icu_locid_transform::fallback::{ + LocaleFallbackPriority, LocaleFallbackSupplement, LocaleFallbacker, LocaleFallbackerWithConfig, +}; +use icu_plurals::{PluralRules, PluralsError}; +use icu_provider::{ + DataError, DataErrorKind, DataKey, DataLocale, DataRequest, DataRequestMetadata, data_key, + fallback::LocaleFallbackConfig, +}; +use icu_provider_adapters::fallback::LocaleFallbackProvider; +use thiserror::Error; +use writeable::Writeable; + +use crate::{sprintf::Message, translations::TranslationTree}; + +/// Fake data key for errors +const DATA_KEY: DataKey = data_key!("mas/translations@1"); + +const FALLBACKER: LocaleFallbackerWithConfig<'static> = LocaleFallbacker::new().for_config({ + let mut config = LocaleFallbackConfig::const_default(); + config.priority = LocaleFallbackPriority::Collation; + config.fallback_supplement = Some(LocaleFallbackSupplement::Collation); + config +}); + +/// Construct a [`DataRequest`] for the given locale +pub fn data_request_for_locale(locale: &DataLocale) -> DataRequest<'_> { + let mut metadata = DataRequestMetadata::default(); + metadata.silent = true; + DataRequest { locale, metadata } +} + +/// Error type for loading 
translations +#[derive(Debug, Error)] +pub enum LoadError { + #[error("Failed to load translation directory {path:?}")] + ReadDir { + path: Utf8PathBuf, + #[source] + source: std::io::Error, + }, + + #[error("Failed to read translation file {path:?}")] + ReadFile { + path: Utf8PathBuf, + #[source] + source: std::io::Error, + }, + + #[error("Failed to deserialize translation file {path:?}")] + Deserialize { + path: Utf8PathBuf, + #[source] + source: serde_json::Error, + }, + + #[error("Invalid locale for file {path:?}")] + InvalidLocale { + path: Utf8PathBuf, + #[source] + source: ParserError, + }, + + #[error("Invalid file name {path:?}")] + InvalidFileName { path: Utf8PathBuf }, +} + +/// A translator for a set of translations. +#[derive(Debug)] +pub struct Translator { + translations: HashMap, + plural_provider: LocaleFallbackProvider, + default_locale: DataLocale, +} + +impl Translator { + /// Create a new translator from a set of translations. + #[must_use] + pub fn new(translations: HashMap) -> Self { + let fallbacker = LocaleFallbacker::new().static_to_owned(); + let plural_provider = LocaleFallbackProvider::new_with_fallbacker( + icu_plurals::provider::Baked, + fallbacker.clone(), + ); + + Self { + translations, + plural_provider, + // TODO: make this configurable + default_locale: icu_locid::locale!("en").into(), + } + } + + /// Load a set of translations from a directory. + /// + /// The directory should contain one JSON file per locale, with the locale + /// being the filename without the extension, e.g. `en-US.json`. + /// + /// # Parameters + /// + /// * `path` - The path to load from. + /// + /// # Errors + /// + /// Returns an error if the directory cannot be read, or if any of the files + /// cannot be parsed. 
+ pub fn load_from_path(path: &Utf8Path) -> Result { + let mut translations = HashMap::new(); + + let dir = path.read_dir_utf8().map_err(|source| LoadError::ReadDir { + path: path.to_owned(), + source, + })?; + + for entry in dir { + let entry = entry.map_err(|source| LoadError::ReadDir { + path: path.to_owned(), + source, + })?; + let path = entry.into_path(); + let Some(name) = path.file_stem() else { + return Err(LoadError::InvalidFileName { path }); + }; + + let locale: Locale = match Locale::from_str(name) { + Ok(locale) => locale, + Err(source) => return Err(LoadError::InvalidLocale { path, source }), + }; + + let file = match File::open(&path) { + Ok(file) => file, + Err(source) => return Err(LoadError::ReadFile { path, source }), + }; + + let mut reader = BufReader::new(file); + + let content = match serde_json::from_reader(&mut reader) { + Ok(content) => content, + Err(source) => return Err(LoadError::Deserialize { path, source }), + }; + + translations.insert(locale.into(), content); + } + + Ok(Self::new(translations)) + } + + /// Get a message from the tree by key, with locale fallback. + /// + /// Returns the message and the locale it was found in. + /// If the message is not found, returns `None`. + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `key` - The key to look up, which is a dot-separated path. 
+ #[must_use] + pub fn message_with_fallback( + &self, + locale: DataLocale, + key: &str, + ) -> Option<(&Message, DataLocale)> { + if let Ok(message) = self.message(&locale, key) { + return Some((message, locale)); + } + + let mut iter = FALLBACKER.fallback_for(locale); + + loop { + let locale = iter.get(); + + if let Ok(message) = self.message(locale, key) { + return Some((message, iter.take())); + } + + // Try the defaut locale if we hit the `und` locale + if locale.is_und() { + let message = self.message(&self.default_locale, key).ok()?; + return Some((message, self.default_locale.clone())); + } + + iter.step(); + } + } + + /// Get a message from the tree by key. + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `key` - The key to look up, which is a dot-separated path. + /// + /// # Errors + /// + /// Returns an error if the requested locale is not found, or if the + /// requested key is not found. + pub fn message(&self, locale: &DataLocale, key: &str) -> Result<&Message, DataError> { + let request = data_request_for_locale(locale); + + let tree = self + .translations + .get(locale) + .ok_or_else(|| DataErrorKind::MissingLocale.with_req(DATA_KEY, request))?; + + let message = tree + .message(key) + .ok_or_else(|| DataErrorKind::MissingDataKey.with_req(DATA_KEY, request))?; + + Ok(message) + } + + /// Get a plural message from the tree by key, with locale fallback. + /// + /// Returns the message and the locale it was found in. + /// If the message is not found, returns `None`. + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `key` - The key to look up, which is a dot-separated path. + /// * `count` - The count to use for pluralization. 
+ #[must_use] + pub fn plural_with_fallback( + &self, + locale: DataLocale, + key: &str, + count: usize, + ) -> Option<(&Message, DataLocale)> { + let mut iter = FALLBACKER.fallback_for(locale); + + loop { + let locale = iter.get(); + + if let Ok(message) = self.plural(locale, key, count) { + return Some((message, iter.take())); + } + + // Stop if we hit the `und` locale + if locale.is_und() { + return None; + } + + iter.step(); + } + } + + /// Get a plural message from the tree by key. + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `key` - The key to look up, which is a dot-separated path. + /// * `count` - The count to use for pluralization. + /// + /// # Errors + /// + /// Returns an error if the requested locale is not found, or if the + /// requested key is not found. + pub fn plural( + &self, + locale: &DataLocale, + key: &str, + count: usize, + ) -> Result<&Message, PluralsError> { + let plurals = PluralRules::try_new_cardinal_unstable(&self.plural_provider, locale)?; + let category = plurals.category_for(count); + + let request = data_request_for_locale(locale); + + let tree = self + .translations + .get(locale) + .ok_or_else(|| DataErrorKind::MissingLocale.with_req(DATA_KEY, request))?; + + let message = tree + .pluralize(key, category) + .ok_or_else(|| DataErrorKind::MissingDataKey.with_req(DATA_KEY, request))?; + + Ok(message) + } + + /// Format a relative date + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `days` - The number of days to format, where 0 = today, 1 = tomorrow, + /// -1 = yesterday, etc. + /// + /// # Errors + /// + /// Returns an error if the requested locale is not found. 
+ pub fn relative_date( + &self, + locale: &DataLocale, + days: i64, + ) -> Result { + // TODO: this is not using the fallbacker + let formatter = RelativeTimeFormatter::try_new_long_day( + locale, + RelativeTimeFormatterOptions { + numeric: Numeric::Auto, + }, + )?; + + let date = formatter.format(days.into()); + Ok(date.write_to_string().into_owned()) + } + + /// Format time + /// + /// # Parameters + /// + /// * `locale` - The locale to use. + /// * `time` - The time to format. + /// + /// # Errors + /// + /// Returns an error if the requested locale is not found. + pub fn short_time( + &self, + locale: &DataLocale, + time: &T, + ) -> Result { + // TODO: this is not using the fallbacker + let formatter = icu_datetime::TimeFormatter::try_new_with_length( + locale, + icu_datetime::options::length::Time::Short, + )?; + + Ok(formatter.format_to_string(time)) + } + + /// Get a list of available locales. + #[must_use] + pub fn available_locales(&self) -> Vec { + self.translations.keys().cloned().collect() + } + + /// Check if a locale is available. + #[must_use] + pub fn has_locale(&self, locale: &DataLocale) -> bool { + self.translations.contains_key(locale) + } + + /// Choose the best available locale from a list of candidates. 
+ #[must_use] + pub fn choose_locale(&self, iter: impl Iterator) -> DataLocale { + for locale in iter { + if self.has_locale(&locale) { + return locale; + } + + let mut fallbacker = FALLBACKER.fallback_for(locale); + + loop { + if fallbacker.get().is_und() { + break; + } + + if self.has_locale(fallbacker.get()) { + return fallbacker.take(); + } + fallbacker.step(); + } + } + + self.default_locale.clone() + } +} + +#[cfg(test)] +mod tests { + use camino::Utf8PathBuf; + use icu_locid::locale; + + use crate::{sprintf::arg_list, translator::Translator}; + + fn translator() -> Translator { + let root: Utf8PathBuf = env!("CARGO_MANIFEST_DIR").parse().unwrap(); + let test_data = root.join("test_data"); + Translator::load_from_path(&test_data).unwrap() + } + + #[test] + fn test_message() { + let translator = translator(); + + let message = translator.message(&locale!("en").into(), "hello").unwrap(); + let formatted = message.format(&arg_list!()).unwrap(); + assert_eq!(formatted, "Hello!"); + + let message = translator.message(&locale!("fr").into(), "hello").unwrap(); + let formatted = message.format(&arg_list!()).unwrap(); + assert_eq!(formatted, "Bonjour !"); + + let message = translator + .message(&locale!("en-US").into(), "hello") + .unwrap(); + let formatted = message.format(&arg_list!()).unwrap(); + assert_eq!(formatted, "Hey!"); + + // Try the fallback chain + let result = translator.message(&locale!("en-US").into(), "goodbye"); + assert!(result.is_err()); + + let (message, locale) = translator + .message_with_fallback(locale!("en-US").into(), "goodbye") + .unwrap(); + let formatted = message.format(&arg_list!()).unwrap(); + assert_eq!(formatted, "Goodbye!"); + assert_eq!(locale, locale!("en").into()); + } + + #[test] + fn test_plurals() { + let translator = translator(); + + let message = translator + .plural(&locale!("en").into(), "active_sessions", 1) + .unwrap(); + let formatted = message.format(&arg_list!(count = 1)).unwrap(); + assert_eq!(formatted, "1 active 
session."); + + let message = translator + .plural(&locale!("en").into(), "active_sessions", 2) + .unwrap(); + let formatted = message.format(&arg_list!(count = 2)).unwrap(); + assert_eq!(formatted, "2 active sessions."); + + // In english, zero is plural + let message = translator + .plural(&locale!("en").into(), "active_sessions", 0) + .unwrap(); + let formatted = message.format(&arg_list!(count = 0)).unwrap(); + assert_eq!(formatted, "0 active sessions."); + + let message = translator + .plural(&locale!("fr").into(), "active_sessions", 1) + .unwrap(); + let formatted = message.format(&arg_list!(count = 1)).unwrap(); + assert_eq!(formatted, "1 session active."); + + let message = translator + .plural(&locale!("fr").into(), "active_sessions", 2) + .unwrap(); + let formatted = message.format(&arg_list!(count = 2)).unwrap(); + assert_eq!(formatted, "2 sessions actives."); + + // In french, zero is singular + let message = translator + .plural(&locale!("fr").into(), "active_sessions", 0) + .unwrap(); + let formatted = message.format(&arg_list!(count = 0)).unwrap(); + assert_eq!(formatted, "0 session active."); + + // Try the fallback chain + let result = translator.plural(&locale!("en-US").into(), "active_sessions", 1); + assert!(result.is_err()); + + let (message, locale) = translator + .plural_with_fallback(locale!("en-US").into(), "active_sessions", 1) + .unwrap(); + let formatted = message.format(&arg_list!(count = 1)).unwrap(); + assert_eq!(formatted, "1 active session."); + assert_eq!(locale, locale!("en").into()); + } +} diff --git a/matrix-authentication-service/crates/i18n/test_data/en-US.json b/matrix-authentication-service/crates/i18n/test_data/en-US.json new file mode 100644 index 00000000..3d2f9beb --- /dev/null +++ b/matrix-authentication-service/crates/i18n/test_data/en-US.json @@ -0,0 +1,3 @@ +{ + "hello": "Hey!" 
+} \ No newline at end of file diff --git a/matrix-authentication-service/crates/i18n/test_data/en.json b/matrix-authentication-service/crates/i18n/test_data/en.json new file mode 100644 index 00000000..8734c2f3 --- /dev/null +++ b/matrix-authentication-service/crates/i18n/test_data/en.json @@ -0,0 +1,8 @@ +{ + "hello": "Hello!", + "goodbye": "Goodbye!", + "active_sessions": { + "one": "%(count)d active session.", + "other": "%(count)d active sessions." + } +} \ No newline at end of file diff --git a/matrix-authentication-service/crates/i18n/test_data/fr.json b/matrix-authentication-service/crates/i18n/test_data/fr.json new file mode 100644 index 00000000..24a982ac --- /dev/null +++ b/matrix-authentication-service/crates/i18n/test_data/fr.json @@ -0,0 +1,8 @@ +{ + "hello": "Bonjour !", + "goodbye": "Au revoir !", + "active_sessions": { + "one": "%(count)d session active.", + "other": "%(count)d sessions actives." + } +} \ No newline at end of file diff --git a/matrix-authentication-service/crates/iana-codegen/Cargo.toml b/matrix-authentication-service/crates/iana-codegen/Cargo.toml new file mode 100644 index 00000000..0a8e8669 --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/Cargo.toml @@ -0,0 +1,30 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-iana-codegen" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +camino.workspace = true +convert_case.workspace = true +csv.workspace = true +reqwest.workspace = true +rustls.workspace = true +serde.workspace = true +tokio.workspace = true +tracing-subscriber.workspace = true +tracing.workspace = true diff --git a/matrix-authentication-service/crates/iana-codegen/src/generation.rs b/matrix-authentication-service/crates/iana-codegen/src/generation.rs new file mode 100644 index 00000000..2f1d4836 --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/src/generation.rs @@ -0,0 +1,243 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use crate::traits::{EnumMember, Section}; + +fn raw_string(string: &str) -> String { + if string.contains('"') { + format!(r##"r#"{string}"#"##) + } else { + format!(r#"r"{string}""#) + } +} + +pub fn struct_def( + f: &mut std::fmt::Formatter<'_>, + section: &Section, + list: &[EnumMember], + is_exhaustive: bool, +) -> std::fmt::Result { + write!( + f, + r"/// {} +/// +/// Source: <{}> +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]", + section.doc, + section.url.unwrap(), + )?; + + if !is_exhaustive { + write!( + f, + r" +#[non_exhaustive]" + )?; + } + + write!( + f, + r" +pub enum {} {{", + section.key, + )?; + for member in list { + writeln!(f)?; + if let Some(description) = &member.description { + writeln!(f, " /// {description}")?; + } else { + writeln!(f, " /// `{}`", member.value)?; + } + writeln!(f, " {},", member.enum_name)?; + } + + if !is_exhaustive { + // Add a variant for custom enums + writeln!(f)?; + writeln!(f, " /// An unknown value.")?; + writeln!(f, " Unknown(String),")?; + } + + writeln!(f, "}}") +} + +pub fn display_impl( + f: &mut std::fmt::Formatter<'_>, + section: &Section, + list: &[EnumMember], + is_exhaustive: bool, +) -> std::fmt::Result { + write!( + f, + r"impl core::fmt::Display for {} {{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {{ + match self {{", + section.key, + )?; + + for member in list { + write!( + f, + r#" + Self::{} => write!(f, "{}"),"#, + member.enum_name, member.value + )?; + } + + if !is_exhaustive { + write!( + f, + r#" + Self::Unknown(value) => write!(f, "{{value}}"),"# + )?; + } + + writeln!( + f, + r" + }} + }} +}}", + ) +} + +pub fn from_str_impl( + f: &mut std::fmt::Formatter<'_>, + section: &Section, + list: &[EnumMember], + is_exhaustive: bool, +) -> std::fmt::Result { + let err_ty = if is_exhaustive { + "crate::ParseError" + } else { + "core::convert::Infallible" + }; + write!( + f, + r"impl core::str::FromStr for {} {{ + type Err = {err_ty}; + + fn from_str(s: 
&str) -> Result {{ + match s {{", + section.key, + )?; + + for member in list { + write!( + f, + r#" + "{}" => Ok(Self::{}),"#, + member.value, member.enum_name + )?; + } + + if is_exhaustive { + write!( + f, + r" + _ => Err(crate::ParseError::new())," + )?; + } else { + write!( + f, + r" + value => Ok(Self::Unknown(value.to_owned())),", + )?; + } + + writeln!( + f, + r" + }} + }} +}}", + ) +} + +pub fn json_schema_impl( + f: &mut std::fmt::Formatter<'_>, + section: &Section, + list: &[EnumMember], +) -> std::fmt::Result { + write!( + f, + r#"impl schemars::JsonSchema for {} {{ + fn schema_name() -> std::borrow::Cow<'static, str> {{ + std::borrow::Cow::Borrowed("{}") + }} + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema {{ + let enums = vec!["#, + section.key, section.key, + )?; + + for member in list { + write!( + f, + r" + // --- + schemars::json_schema!({{", + )?; + + if let Some(description) = &member.description { + write!( + f, + r#" + "description": {},"#, + raw_string(description), + )?; + } + + write!( + f, + r#" + "const": "{}", + }}),"#, + member.value + )?; + } + + writeln!( + f, + r#" + ]; + + let description = {}; + schemars::json_schema!({{ + "description": description, + "anyOf": enums, + }}) + }} +}}"#, + raw_string(section.doc), + ) +} + +pub fn serde_impl(f: &mut std::fmt::Formatter<'_>, section: &Section) -> std::fmt::Result { + writeln!( + f, + r"impl<'de> serde::Deserialize<'de> for {} {{ + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + {{ + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + }} +}} + +impl serde::Serialize for {} {{ + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + {{ + serializer.serialize_str(&self.to_string()) + }} +}}", + section.key, section.key, + ) +} diff --git 
a/matrix-authentication-service/crates/iana-codegen/src/jose.rs b/matrix-authentication-service/crates/iana-codegen/src/jose.rs new file mode 100644 index 00000000..36d00de7 --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/src/jose.rs @@ -0,0 +1,284 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use serde::Deserialize; + +use crate::{ + EnumEntry, + traits::{Section, s}, +}; + +#[derive(Debug, Deserialize, PartialEq, Eq)] +enum Usage { + #[serde(rename = "alg")] + Alg, + #[serde(rename = "enc")] + Enc, + #[serde(rename = "JWK")] + Jwk, +} + +#[derive(Debug, Deserialize)] +enum Requirements { + Required, + #[serde(rename = "Recommended+")] + RecommendedPlus, + Recommended, + #[serde(rename = "Recommended-")] + RecommendedMinus, + Optional, + Prohibited, + Deprecated, +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct WebEncryptionSignatureAlgorithm { + #[serde(rename = "Algorithm Name")] + name: String, + #[serde(rename = "Algorithm Description")] + description: String, + #[serde(rename = "Algorithm Usage Location(s)")] + usage: Usage, + #[serde(rename = "JOSE Implementation Requirements")] + requirements: Requirements, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, + #[serde(rename = "Algorithm Analysis Document(s)")] + analysis: String, +} + +impl EnumEntry for WebEncryptionSignatureAlgorithm { + const URL: &'static str = + "http://www.iana.org/assignments/jose/web-signature-encryption-algorithms.csv"; + const SECTIONS: &'static [Section] = &[ + s( + "JsonWebSignatureAlg", + r#"JSON Web Signature "alg" parameter"#, + ), + s( + "JsonWebEncryptionAlg", + r#"JSON Web Encryption "alg" parameter"#, + ), + s( + "JsonWebEncryptionEnc", + r#"JSON Web 
Encryption "enc" parameter"#, + ), + ]; + + fn key(&self) -> Option<&'static str> { + match self.usage { + Usage::Alg => { + // RFC7518 has one for signature algs and one for encryption algs. The other two + // RFCs are additional Elliptic curve signature algs + if self.reference.contains("RFC7518, Section 3") + || self.reference.contains("RFC8037") + || self.reference.contains("RFC8812") + || (self + .reference + .contains("RFC-ietf-jose-fully-specified-algorithms") + && self.reference.contains("Section 2")) + { + Some("JsonWebSignatureAlg") + } else if self.reference.contains("RFC7518, Section 4") + || self.reference.contains("WebCryptoAPI") + || (self + .reference + .contains("RFC-ietf-jose-fully-specified-algorithms") + && self.reference.contains("Section 3")) + { + Some("JsonWebEncryptionAlg") + } else { + tracing::warn!("Unknown reference {} for JWA", self.reference); + None + } + } + Usage::Enc => Some("JsonWebEncryptionEnc"), + Usage::Jwk => None, + } + } + + fn name(&self) -> &str { + &self.name + } + + fn description(&self) -> Option<&str> { + Some(&self.description) + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct WebEncryptionCompressionAlgorithm { + #[serde(rename = "Compression Algorithm Value")] + value: String, + #[serde(rename = "Compression Algorithm Description")] + description: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for WebEncryptionCompressionAlgorithm { + const URL: &'static str = + "http://www.iana.org/assignments/jose/web-encryption-compression-algorithms.csv"; + const SECTIONS: &'static [Section] = &[s( + "JsonWebEncryptionCompressionAlgorithm", + "JSON Web Encryption Compression Algorithm", + )]; + + fn key(&self) -> Option<&'static str> { + Some("JsonWebEncryptionCompressionAlgorithm") + } + + fn name(&self) -> &str { + &self.value + } + + fn description(&self) -> Option<&str> { + 
Some(&self.description) + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct WebKeyType { + #[serde(rename = "\"kty\" Parameter Value")] + value: String, + #[serde(rename = "Key Type Description")] + description: String, + #[serde(rename = "JOSE Implementation Requirements")] + requirements: Requirements, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for WebKeyType { + const URL: &'static str = "http://www.iana.org/assignments/jose/web-key-types.csv"; + const SECTIONS: &'static [Section] = &[s("JsonWebKeyType", "JSON Web Key Type")]; + + fn key(&self) -> Option<&'static str> { + Some("JsonWebKeyType") + } + + fn name(&self) -> &str { + &self.value + } + + fn description(&self) -> Option<&str> { + Some(&self.description) + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct WebKeyEllipticCurve { + #[serde(rename = "Curve Name")] + name: String, + #[serde(rename = "Curve Description")] + description: String, + #[serde(rename = "JOSE Implementation Requirements")] + requirements: Requirements, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for WebKeyEllipticCurve { + const URL: &'static str = "http://www.iana.org/assignments/jose/web-key-elliptic-curve.csv"; + const SECTIONS: &'static [Section] = &[ + s( + "JsonWebKeyEcEllipticCurve", + "JSON Web Key EC Elliptic Curve", + ), + s( + "JsonWebKeyOkpEllipticCurve", + "JSON Web Key OKP Elliptic Curve", + ), + ]; + + fn key(&self) -> Option<&'static str> { + if self.name.starts_with("P-") || self.name == "secp256k1" { + Some("JsonWebKeyEcEllipticCurve") + } else { + Some("JsonWebKeyOkpEllipticCurve") + } + } + + fn name(&self) -> &str { + &self.name + } + + fn description(&self) -> Option<&str> { + Some(&self.description) + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub 
struct WebKeyUse { + #[serde(rename = "Use Member Value")] + value: String, + #[serde(rename = "Use Description")] + description: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for WebKeyUse { + const URL: &'static str = "http://www.iana.org/assignments/jose/web-key-use.csv"; + const SECTIONS: &'static [Section] = &[s("JsonWebKeyUse", "JSON Web Key Use")]; + + fn key(&self) -> Option<&'static str> { + Some("JsonWebKeyUse") + } + + fn name(&self) -> &str { + &self.value + } + + fn description(&self) -> Option<&str> { + Some(&self.description) + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct WebKeyOperation { + #[serde(rename = "Key Operation Value")] + name: String, + #[serde(rename = "Key Operation Description")] + description: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for WebKeyOperation { + const URL: &'static str = "http://www.iana.org/assignments/jose/web-key-operations.csv"; + const SECTIONS: &'static [Section] = &[s("JsonWebKeyOperation", "JSON Web Key Operation")]; + + fn key(&self) -> Option<&'static str> { + Some("JsonWebKeyOperation") + } + + fn name(&self) -> &str { + &self.name + } + + fn description(&self) -> Option<&str> { + Some(&self.description) + } +} diff --git a/matrix-authentication-service/crates/iana-codegen/src/main.rs b/matrix-authentication-service/crates/iana-codegen/src/main.rs new file mode 100644 index 00000000..d2ff734e --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/src/main.rs @@ -0,0 +1,208 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::HashMap, fmt::Display}; + +use camino::{Utf8Path, Utf8PathBuf}; +use reqwest::Client; +use tokio::io::AsyncWriteExt; +use tracing::Level; + +mod generation; +pub mod jose; +pub mod oauth; +pub mod traits; + +#[derive(Debug)] +struct File { + client: Client, + registry_name: &'static str, + registry_url: &'static str, + sections: Vec
, + items: HashMap<&'static str, Vec>, +} + +fn resolve_path(relative: impl AsRef) -> Utf8PathBuf { + let crate_root = Utf8Path::new(env!("CARGO_MANIFEST_DIR")); + let workspace_root = crate_root.parent().unwrap().parent().unwrap(); + workspace_root.join(relative) +} + +impl File { + #[tracing::instrument(skip(client))] + fn new(registry_name: &'static str, registry_url: &'static str, client: Client) -> Self { + tracing::info!("Generating file from IANA registry"); + Self { + client, + registry_name, + registry_url, + sections: Vec::new(), + items: HashMap::new(), + } + } + + #[tracing::instrument(skip_all, fields(url))] + async fn load(mut self) -> anyhow::Result { + tracing::Span::current().record("url", T::URL); + self.sections.extend(T::sections()); + for (key, value) in T::fetch(&self.client).await? { + self.items.entry(key).or_default().push(value); + } + Ok(self) + } + + #[tracing::instrument(skip_all)] + async fn write(&self, path: impl AsRef) -> anyhow::Result<()> { + let mut file = tokio::fs::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(path.as_ref()) + .await?; + + tracing::info!("Writing file"); + file.write_all(format!("{self}").as_bytes()).await?; + + Ok(()) + } +} + +impl Display for File { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!( + f, + r"// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::doc_markdown)] + +//! Enums from the {:?} IANA registry +//! 
See <{}> + +// Do not edit this file manually", + self.registry_name, self.registry_url, + )?; + + for section in &self.sections { + let Some(list) = self.items.get(section.key) else { + continue; + }; + + let is_exhaustive = section.key == "OAuthAuthorizationEndpointResponseType"; + writeln!(f)?; + + self::generation::struct_def(f, section, list, is_exhaustive)?; + writeln!(f)?; + + // Write the Display impl + self::generation::display_impl(f, section, list, is_exhaustive)?; + writeln!(f)?; + + // Write the FromStr impl + self::generation::from_str_impl(f, section, list, is_exhaustive)?; + writeln!(f)?; + + // Write the Serialize and Deserialize impls + self::generation::serde_impl(f, section)?; + writeln!(f)?; + + // Write the JsonSchema impl + self::generation::json_schema_impl(f, section, list)?; + } + + Ok(()) + } +} + +use self::traits::{EnumEntry, EnumMember, Section}; + +#[tracing::instrument(skip_all, fields(%path))] +async fn generate_jose( + client: &Client, + path: impl AsRef + std::fmt::Display, +) -> anyhow::Result<()> { + let path = resolve_path(path); + let client = client.clone(); + + let file = File::new( + "JSON Object Signing and Encryption", + "https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml", + client.clone(), + ) + .load::() + .await? + .load::() + .await? + .load::() + .await? + .load::() + .await? + .load::() + .await? + .load::() + .await?; + + file.write(path).await?; + + Ok(()) +} + +#[tracing::instrument(skip_all, fields(%path))] +async fn generate_oauth( + client: &Client, + path: impl AsRef + std::fmt::Display, +) -> anyhow::Result<()> { + let path = resolve_path(path); + let client = client.clone(); + + let file = File::new( + "OAuth Parameters", + "https://www.iana.org/assignments/jose/jose.xhtml", + client, + ) + .load::() + .await? + .load::() + .await? + .load::() + .await? + .load::() + .await? 
+ .load::() + .await?; + + file.write(path).await?; + + Ok(()) +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + tracing_subscriber::fmt() + .with_max_level(Level::INFO) + .pretty() + .init(); + + rustls::crypto::aws_lc_rs::default_provider() + .install_default() + .unwrap(); + + #[expect( + clippy::disallowed_methods, + reason = "reqwest::Client::new should be disallowed by clippy, but for the codegen it's fine" + )] + let client = Client::new(); + + let iana_crate_root = Utf8Path::new("crates/iana/"); + + generate_jose(&client, iana_crate_root.join("src/jose.rs")).await?; + generate_oauth(&client, iana_crate_root.join("src/oauth.rs")).await?; + + Ok(()) +} diff --git a/matrix-authentication-service/crates/iana-codegen/src/oauth.rs b/matrix-authentication-service/crates/iana-codegen/src/oauth.rs new file mode 100644 index 00000000..2809bb7d --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/src/oauth.rs @@ -0,0 +1,146 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use serde::Deserialize; + +use crate::{ + EnumEntry, + traits::{Section, s}, +}; + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct AccessTokenType { + #[serde(rename = "Name")] + name: String, + #[serde(rename = "Additional Token Endpoint Response Parameters")] + additional_parameters: String, + #[serde(rename = "HTTP Authentication Scheme(s)")] + http_schemes: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for AccessTokenType { + const URL: &'static str = "http://www.iana.org/assignments/oauth-parameters/token-types.csv"; + const SECTIONS: &'static [Section] = &[s("OAuthAccessTokenType", "OAuth Access Token Type")]; + + fn key(&self) -> Option<&'static str> { + Some("OAuthAccessTokenType") + } + + fn name(&self) -> &str { + &self.name + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct AuthorizationEndpointResponseType { + #[serde(rename = "Name")] + name: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for AuthorizationEndpointResponseType { + const URL: &'static str = "http://www.iana.org/assignments/oauth-parameters/endpoint.csv"; + const SECTIONS: &'static [Section] = &[s( + "OAuthAuthorizationEndpointResponseType", + "OAuth Authorization Endpoint Response Type", + )]; + + fn key(&self) -> Option<&'static str> { + Some("OAuthAuthorizationEndpointResponseType") + } + + fn name(&self) -> &str { + &self.name + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct TokenEndpointAuthenticationMethod { + #[serde(rename = "Token Endpoint Authentication Method Name")] + name: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct TokenTypeHint { + #[serde(rename 
= "Hint Value")] + name: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for TokenTypeHint { + const URL: &'static str = + "http://www.iana.org/assignments/oauth-parameters/token-type-hint.csv"; + const SECTIONS: &'static [Section] = &[s("OAuthTokenTypeHint", "OAuth Token Type Hint")]; + + fn key(&self) -> Option<&'static str> { + Some("OAuthTokenTypeHint") + } + + fn name(&self) -> &str { + &self.name + } +} + +impl EnumEntry for TokenEndpointAuthenticationMethod { + const URL: &'static str = + "http://www.iana.org/assignments/oauth-parameters/token-endpoint-auth-method.csv"; + const SECTIONS: &'static [Section] = &[s( + "OAuthClientAuthenticationMethod", + "OAuth Token Endpoint Authentication Method", + )]; + + fn key(&self) -> Option<&'static str> { + Some("OAuthClientAuthenticationMethod") + } + + fn name(&self) -> &str { + &self.name + } +} + +#[allow(dead_code)] +#[derive(Debug, Deserialize)] +pub struct PkceCodeChallengeMethod { + #[serde(rename = "Code Challenge Method Parameter Name")] + name: String, + #[serde(rename = "Change Controller")] + change_controller: String, + #[serde(rename = "Reference")] + reference: String, +} + +impl EnumEntry for PkceCodeChallengeMethod { + const URL: &'static str = + "http://www.iana.org/assignments/oauth-parameters/pkce-code-challenge-method.csv"; + const SECTIONS: &'static [Section] = + &[s("PkceCodeChallengeMethod", "PKCE Code Challenge Method")]; + + fn key(&self) -> Option<&'static str> { + Some("PkceCodeChallengeMethod") + } + + fn name(&self) -> &str { + &self.name + } +} diff --git a/matrix-authentication-service/crates/iana-codegen/src/traits.rs b/matrix-authentication-service/crates/iana-codegen/src/traits.rs new file mode 100644 index 00000000..c5a2617e --- /dev/null +++ b/matrix-authentication-service/crates/iana-codegen/src/traits.rs @@ -0,0 +1,116 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use anyhow::Context;
+use async_trait::async_trait;
+use convert_case::{Case, Casing};
+use serde::de::DeserializeOwned;
+
+use super::Client;
+
+#[derive(Debug, Clone)]
+pub struct Section {
+    pub key: &'static str,
+    pub doc: &'static str,
+    pub url: Option<&'static str>,
+}
+
+#[must_use]
+pub const fn s(key: &'static str, doc: &'static str) -> Section {
+    Section {
+        key,
+        doc,
+        url: None,
+    }
+}
+
+#[derive(Debug)]
+pub struct EnumMember {
+    pub value: String,
+    pub description: Option<String>,
+    pub enum_name: String,
+}
+
+#[async_trait]
+pub trait EnumEntry: DeserializeOwned + Send + Sync {
+    const URL: &'static str;
+    const SECTIONS: &'static [Section];
+
+    #[must_use]
+    fn sections() -> Vec<Section>
{
+        Self::SECTIONS
+            .iter()
+            .map(|s| Section {
+                url: Some(Self::URL),
+                ..*s
+            })
+            .collect()
+    }
+
+    fn key(&self) -> Option<&'static str>;
+    fn name(&self) -> &str;
+    fn description(&self) -> Option<&str> {
+        None
+    }
+    fn enum_name(&self) -> String {
+        // Do the case transformation twice to have "N_A" turned to "Na" instead of "NA"
+        self.name()
+            .replace('+', "_")
+            .to_case(Case::Pascal)
+            .to_case(Case::Pascal)
+    }
+
+    async fn fetch(client: &Client) -> anyhow::Result<Vec<(&'static str, EnumMember)>> {
+        tracing::info!("Fetching CSV");
+
+        #[expect(
+            clippy::disallowed_methods,
+            reason = "we don't use send_traced in the codegen"
+        )]
+        let response = client
+            .get(Self::URL)
+            .header("User-Agent", "mas-iana-codegen/0.1")
+            .send()
+            .await
+            .context(format!("can't fetch the CSV at {}", Self::URL))?;
+
+        let status = response.status();
+        anyhow::ensure!(status.is_success(), "HTTP status code is not 200: {status}");
+
+        let body = response
+            .text()
+            .await
+            .context(format!("can't fetch the CSV body at {}", Self::URL))?;
+
+        let parsed: Result<Vec<_>, _> = csv::Reader::from_reader(body.as_bytes())
+            .into_deserialize()
+            .filter_map(|item: Result<Self, _>| {
+                item.map(|item| {
+                    if item
+                        .description()
+                        .is_some_and(|desc| desc.contains("TEMPORARY"))
+                    {
+                        return None;
+                    }
+
+                    item.key().map(|key| {
+                        (
+                            key,
+                            EnumMember {
+                                value: item.name().to_owned(),
+                                description: item.description().map(ToOwned::to_owned),
+                                enum_name: item.enum_name(),
+                            },
+                        )
+                    })
+                })
+                .transpose()
+            })
+            .collect();
+
+        Ok(parsed.context(format!("can't parse the CSV at {}", Self::URL))?)
+    }
+}
diff --git a/matrix-authentication-service/crates/iana/Cargo.toml b/matrix-authentication-service/crates/iana/Cargo.toml
new file mode 100644
index 00000000..d646bac1
--- /dev/null
+++ b/matrix-authentication-service/crates/iana/Cargo.toml
@@ -0,0 +1,22 @@
+# Copyright 2025 New Vector Ltd.
+#
+# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+# Please see LICENSE files in the repository root for full details.
+ +[package] +name = "mas-iana" +description = "IANA registry data for JOSE and OAuth 2.0" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +serde.workspace = true +schemars.workspace = true diff --git a/matrix-authentication-service/crates/iana/src/jose.rs b/matrix-authentication-service/crates/iana/src/jose.rs new file mode 100644 index 00000000..8a0434fa --- /dev/null +++ b/matrix-authentication-service/crates/iana/src/jose.rs @@ -0,0 +1,1247 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::doc_markdown)] + +//! Enums from the "JSON Object Signing and Encryption" IANA registry +//! See + +// Do not edit this file manually + +/// JSON Web Signature "alg" parameter +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebSignatureAlg { + /// HMAC using SHA-256 + Hs256, + + /// HMAC using SHA-384 + Hs384, + + /// HMAC using SHA-512 + Hs512, + + /// RSASSA-PKCS1-v1_5 using SHA-256 + Rs256, + + /// RSASSA-PKCS1-v1_5 using SHA-384 + Rs384, + + /// RSASSA-PKCS1-v1_5 using SHA-512 + Rs512, + + /// ECDSA using P-256 and SHA-256 + Es256, + + /// ECDSA using P-384 and SHA-384 + Es384, + + /// ECDSA using P-521 and SHA-512 + Es512, + + /// RSASSA-PSS using SHA-256 and MGF1 with SHA-256 + Ps256, + + /// RSASSA-PSS using SHA-384 and MGF1 with SHA-384 + Ps384, + + /// RSASSA-PSS using SHA-512 and MGF1 with SHA-512 + Ps512, + + /// No digital signature or MAC performed + None, + + /// EdDSA signature algorithms + EdDsa, + + /// ECDSA using secp256k1 curve and SHA-256 + Es256K, + + /// EdDSA using Ed25519 curve + 
Ed25519, + + /// EdDSA using Ed448 curve + Ed448, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebSignatureAlg { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Hs256 => write!(f, "HS256"), + Self::Hs384 => write!(f, "HS384"), + Self::Hs512 => write!(f, "HS512"), + Self::Rs256 => write!(f, "RS256"), + Self::Rs384 => write!(f, "RS384"), + Self::Rs512 => write!(f, "RS512"), + Self::Es256 => write!(f, "ES256"), + Self::Es384 => write!(f, "ES384"), + Self::Es512 => write!(f, "ES512"), + Self::Ps256 => write!(f, "PS256"), + Self::Ps384 => write!(f, "PS384"), + Self::Ps512 => write!(f, "PS512"), + Self::None => write!(f, "none"), + Self::EdDsa => write!(f, "EdDSA"), + Self::Es256K => write!(f, "ES256K"), + Self::Ed25519 => write!(f, "Ed25519"), + Self::Ed448 => write!(f, "Ed448"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebSignatureAlg { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "HS256" => Ok(Self::Hs256), + "HS384" => Ok(Self::Hs384), + "HS512" => Ok(Self::Hs512), + "RS256" => Ok(Self::Rs256), + "RS384" => Ok(Self::Rs384), + "RS512" => Ok(Self::Rs512), + "ES256" => Ok(Self::Es256), + "ES384" => Ok(Self::Es384), + "ES512" => Ok(Self::Es512), + "PS256" => Ok(Self::Ps256), + "PS384" => Ok(Self::Ps384), + "PS512" => Ok(Self::Ps512), + "none" => Ok(Self::None), + "EdDSA" => Ok(Self::EdDsa), + "ES256K" => Ok(Self::Es256K), + "Ed25519" => Ok(Self::Ed25519), + "Ed448" => Ok(Self::Ed448), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebSignatureAlg { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebSignatureAlg { + fn serialize(&self, 
serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebSignatureAlg { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebSignatureAlg") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"HMAC using SHA-256", + "const": "HS256", + }), + // --- + schemars::json_schema!({ + "description": r"HMAC using SHA-384", + "const": "HS384", + }), + // --- + schemars::json_schema!({ + "description": r"HMAC using SHA-512", + "const": "HS512", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PKCS1-v1_5 using SHA-256", + "const": "RS256", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PKCS1-v1_5 using SHA-384", + "const": "RS384", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PKCS1-v1_5 using SHA-512", + "const": "RS512", + }), + // --- + schemars::json_schema!({ + "description": r"ECDSA using P-256 and SHA-256", + "const": "ES256", + }), + // --- + schemars::json_schema!({ + "description": r"ECDSA using P-384 and SHA-384", + "const": "ES384", + }), + // --- + schemars::json_schema!({ + "description": r"ECDSA using P-521 and SHA-512", + "const": "ES512", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PSS using SHA-256 and MGF1 with SHA-256", + "const": "PS256", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PSS using SHA-384 and MGF1 with SHA-384", + "const": "PS384", + }), + // --- + schemars::json_schema!({ + "description": r"RSASSA-PSS using SHA-512 and MGF1 with SHA-512", + "const": "PS512", + }), + // --- + schemars::json_schema!({ + "description": r"No digital signature or MAC performed", + "const": "none", + }), + // --- + schemars::json_schema!({ + "description": r"EdDSA signature 
algorithms", + "const": "EdDSA", + }), + // --- + schemars::json_schema!({ + "description": r"ECDSA using secp256k1 curve and SHA-256", + "const": "ES256K", + }), + // --- + schemars::json_schema!({ + "description": r"EdDSA using Ed25519 curve", + "const": "Ed25519", + }), + // --- + schemars::json_schema!({ + "description": r"EdDSA using Ed448 curve", + "const": "Ed448", + }), + ]; + + let description = r#"JSON Web Signature "alg" parameter"#; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Encryption "alg" parameter +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebEncryptionAlg { + /// RSAES-PKCS1-v1_5 + Rsa15, + + /// RSAES OAEP using default parameters + RsaOaep, + + /// RSAES OAEP using SHA-256 and MGF1 with SHA-256 + RsaOaep256, + + /// AES Key Wrap using 128-bit key + A128Kw, + + /// AES Key Wrap using 192-bit key + A192Kw, + + /// AES Key Wrap using 256-bit key + A256Kw, + + /// Direct use of a shared symmetric key + Dir, + + /// ECDH-ES using Concat KDF + EcdhEs, + + /// ECDH-ES using Concat KDF and "A128KW" wrapping + EcdhEsA128Kw, + + /// ECDH-ES using Concat KDF and "A192KW" wrapping + EcdhEsA192Kw, + + /// ECDH-ES using Concat KDF and "A256KW" wrapping + EcdhEsA256Kw, + + /// Key wrapping with AES GCM using 128-bit key + A128Gcmkw, + + /// Key wrapping with AES GCM using 192-bit key + A192Gcmkw, + + /// Key wrapping with AES GCM using 256-bit key + A256Gcmkw, + + /// PBES2 with HMAC SHA-256 and "A128KW" wrapping + Pbes2Hs256A128Kw, + + /// PBES2 with HMAC SHA-384 and "A192KW" wrapping + Pbes2Hs384A192Kw, + + /// PBES2 with HMAC SHA-512 and "A256KW" wrapping + Pbes2Hs512A256Kw, + + /// RSA-OAEP using SHA-384 and MGF1 with SHA-384 + RsaOaep384, + + /// RSA-OAEP using SHA-512 and MGF1 with SHA-512 + RsaOaep512, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for JsonWebEncryptionAlg { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Rsa15 => write!(f, "RSA1_5"), + Self::RsaOaep => write!(f, "RSA-OAEP"), + Self::RsaOaep256 => write!(f, "RSA-OAEP-256"), + Self::A128Kw => write!(f, "A128KW"), + Self::A192Kw => write!(f, "A192KW"), + Self::A256Kw => write!(f, "A256KW"), + Self::Dir => write!(f, "dir"), + Self::EcdhEs => write!(f, "ECDH-ES"), + Self::EcdhEsA128Kw => write!(f, "ECDH-ES+A128KW"), + Self::EcdhEsA192Kw => write!(f, "ECDH-ES+A192KW"), + Self::EcdhEsA256Kw => write!(f, "ECDH-ES+A256KW"), + Self::A128Gcmkw => write!(f, "A128GCMKW"), + Self::A192Gcmkw => write!(f, "A192GCMKW"), + Self::A256Gcmkw => write!(f, "A256GCMKW"), + Self::Pbes2Hs256A128Kw => write!(f, "PBES2-HS256+A128KW"), + Self::Pbes2Hs384A192Kw => write!(f, "PBES2-HS384+A192KW"), + Self::Pbes2Hs512A256Kw => write!(f, "PBES2-HS512+A256KW"), + Self::RsaOaep384 => write!(f, "RSA-OAEP-384"), + Self::RsaOaep512 => write!(f, "RSA-OAEP-512"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebEncryptionAlg { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "RSA1_5" => Ok(Self::Rsa15), + "RSA-OAEP" => Ok(Self::RsaOaep), + "RSA-OAEP-256" => Ok(Self::RsaOaep256), + "A128KW" => Ok(Self::A128Kw), + "A192KW" => Ok(Self::A192Kw), + "A256KW" => Ok(Self::A256Kw), + "dir" => Ok(Self::Dir), + "ECDH-ES" => Ok(Self::EcdhEs), + "ECDH-ES+A128KW" => Ok(Self::EcdhEsA128Kw), + "ECDH-ES+A192KW" => Ok(Self::EcdhEsA192Kw), + "ECDH-ES+A256KW" => Ok(Self::EcdhEsA256Kw), + "A128GCMKW" => Ok(Self::A128Gcmkw), + "A192GCMKW" => Ok(Self::A192Gcmkw), + "A256GCMKW" => Ok(Self::A256Gcmkw), + "PBES2-HS256+A128KW" => Ok(Self::Pbes2Hs256A128Kw), + "PBES2-HS384+A192KW" => Ok(Self::Pbes2Hs384A192Kw), + "PBES2-HS512+A256KW" => Ok(Self::Pbes2Hs512A256Kw), + "RSA-OAEP-384" => Ok(Self::RsaOaep384), + "RSA-OAEP-512" => 
Ok(Self::RsaOaep512), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebEncryptionAlg { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebEncryptionAlg { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebEncryptionAlg { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebEncryptionAlg") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"RSAES-PKCS1-v1_5", + "const": "RSA1_5", + }), + // --- + schemars::json_schema!({ + "description": r"RSAES OAEP using default parameters", + "const": "RSA-OAEP", + }), + // --- + schemars::json_schema!({ + "description": r"RSAES OAEP using SHA-256 and MGF1 with SHA-256", + "const": "RSA-OAEP-256", + }), + // --- + schemars::json_schema!({ + "description": r"AES Key Wrap using 128-bit key", + "const": "A128KW", + }), + // --- + schemars::json_schema!({ + "description": r"AES Key Wrap using 192-bit key", + "const": "A192KW", + }), + // --- + schemars::json_schema!({ + "description": r"AES Key Wrap using 256-bit key", + "const": "A256KW", + }), + // --- + schemars::json_schema!({ + "description": r"Direct use of a shared symmetric key", + "const": "dir", + }), + // --- + schemars::json_schema!({ + "description": r"ECDH-ES using Concat KDF", + "const": "ECDH-ES", + }), + // --- + schemars::json_schema!({ + "description": r#"ECDH-ES using Concat KDF and "A128KW" wrapping"#, + "const": "ECDH-ES+A128KW", + }), + // --- + schemars::json_schema!({ + "description": r#"ECDH-ES using 
Concat KDF and "A192KW" wrapping"#, + "const": "ECDH-ES+A192KW", + }), + // --- + schemars::json_schema!({ + "description": r#"ECDH-ES using Concat KDF and "A256KW" wrapping"#, + "const": "ECDH-ES+A256KW", + }), + // --- + schemars::json_schema!({ + "description": r"Key wrapping with AES GCM using 128-bit key", + "const": "A128GCMKW", + }), + // --- + schemars::json_schema!({ + "description": r"Key wrapping with AES GCM using 192-bit key", + "const": "A192GCMKW", + }), + // --- + schemars::json_schema!({ + "description": r"Key wrapping with AES GCM using 256-bit key", + "const": "A256GCMKW", + }), + // --- + schemars::json_schema!({ + "description": r#"PBES2 with HMAC SHA-256 and "A128KW" wrapping"#, + "const": "PBES2-HS256+A128KW", + }), + // --- + schemars::json_schema!({ + "description": r#"PBES2 with HMAC SHA-384 and "A192KW" wrapping"#, + "const": "PBES2-HS384+A192KW", + }), + // --- + schemars::json_schema!({ + "description": r#"PBES2 with HMAC SHA-512 and "A256KW" wrapping"#, + "const": "PBES2-HS512+A256KW", + }), + // --- + schemars::json_schema!({ + "description": r"RSA-OAEP using SHA-384 and MGF1 with SHA-384", + "const": "RSA-OAEP-384", + }), + // --- + schemars::json_schema!({ + "description": r"RSA-OAEP using SHA-512 and MGF1 with SHA-512", + "const": "RSA-OAEP-512", + }), + ]; + + let description = r#"JSON Web Encryption "alg" parameter"#; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Encryption "enc" parameter +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebEncryptionEnc { + /// AES_128_CBC_HMAC_SHA_256 authenticated encryption algorithm + A128CbcHs256, + + /// AES_192_CBC_HMAC_SHA_384 authenticated encryption algorithm + A192CbcHs384, + + /// AES_256_CBC_HMAC_SHA_512 authenticated encryption algorithm + A256CbcHs512, + + /// AES GCM using 128-bit key + A128Gcm, + + /// AES GCM using 192-bit key + A192Gcm, + + /// AES GCM 
using 256-bit key + A256Gcm, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebEncryptionEnc { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::A128CbcHs256 => write!(f, "A128CBC-HS256"), + Self::A192CbcHs384 => write!(f, "A192CBC-HS384"), + Self::A256CbcHs512 => write!(f, "A256CBC-HS512"), + Self::A128Gcm => write!(f, "A128GCM"), + Self::A192Gcm => write!(f, "A192GCM"), + Self::A256Gcm => write!(f, "A256GCM"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebEncryptionEnc { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "A128CBC-HS256" => Ok(Self::A128CbcHs256), + "A192CBC-HS384" => Ok(Self::A192CbcHs384), + "A256CBC-HS512" => Ok(Self::A256CbcHs512), + "A128GCM" => Ok(Self::A128Gcm), + "A192GCM" => Ok(Self::A192Gcm), + "A256GCM" => Ok(Self::A256Gcm), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebEncryptionEnc { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebEncryptionEnc { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebEncryptionEnc { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebEncryptionEnc") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"AES_128_CBC_HMAC_SHA_256 authenticated encryption algorithm", + "const": "A128CBC-HS256", + }), + // --- + schemars::json_schema!({ + "description": r"AES_192_CBC_HMAC_SHA_384 
authenticated encryption algorithm", + "const": "A192CBC-HS384", + }), + // --- + schemars::json_schema!({ + "description": r"AES_256_CBC_HMAC_SHA_512 authenticated encryption algorithm", + "const": "A256CBC-HS512", + }), + // --- + schemars::json_schema!({ + "description": r"AES GCM using 128-bit key", + "const": "A128GCM", + }), + // --- + schemars::json_schema!({ + "description": r"AES GCM using 192-bit key", + "const": "A192GCM", + }), + // --- + schemars::json_schema!({ + "description": r"AES GCM using 256-bit key", + "const": "A256GCM", + }), + ]; + + let description = r#"JSON Web Encryption "enc" parameter"#; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Encryption Compression Algorithm +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebEncryptionCompressionAlgorithm { + /// DEFLATE + Def, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebEncryptionCompressionAlgorithm { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Def => write!(f, "DEF"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebEncryptionCompressionAlgorithm { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "DEF" => Ok(Self::Def), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebEncryptionCompressionAlgorithm { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebEncryptionCompressionAlgorithm { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl 
schemars::JsonSchema for JsonWebEncryptionCompressionAlgorithm { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebEncryptionCompressionAlgorithm") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"DEFLATE", + "const": "DEF", + }), + ]; + + let description = r"JSON Web Encryption Compression Algorithm"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Key Type +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebKeyType { + /// Elliptic Curve + Ec, + + /// RSA + Rsa, + + /// Octet sequence + Oct, + + /// Octet string key pairs + Okp, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebKeyType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Ec => write!(f, "EC"), + Self::Rsa => write!(f, "RSA"), + Self::Oct => write!(f, "oct"), + Self::Okp => write!(f, "OKP"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebKeyType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "EC" => Ok(Self::Ec), + "RSA" => Ok(Self::Rsa), + "oct" => Ok(Self::Oct), + "OKP" => Ok(Self::Okp), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebKeyType { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebKeyType { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl 
schemars::JsonSchema for JsonWebKeyType { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebKeyType") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"Elliptic Curve", + "const": "EC", + }), + // --- + schemars::json_schema!({ + "description": r"RSA", + "const": "RSA", + }), + // --- + schemars::json_schema!({ + "description": r"Octet sequence", + "const": "oct", + }), + // --- + schemars::json_schema!({ + "description": r"Octet string key pairs", + "const": "OKP", + }), + ]; + + let description = r"JSON Web Key Type"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Key EC Elliptic Curve +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebKeyEcEllipticCurve { + /// P-256 Curve + P256, + + /// P-384 Curve + P384, + + /// P-521 Curve + P521, + + /// SECG secp256k1 curve + Secp256K1, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for JsonWebKeyEcEllipticCurve { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::P256 => write!(f, "P-256"), + Self::P384 => write!(f, "P-384"), + Self::P521 => write!(f, "P-521"), + Self::Secp256K1 => write!(f, "secp256k1"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebKeyEcEllipticCurve { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "P-256" => Ok(Self::P256), + "P-384" => Ok(Self::P384), + "P-521" => Ok(Self::P521), + "secp256k1" => Ok(Self::Secp256K1), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebKeyEcEllipticCurve { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebKeyEcEllipticCurve { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebKeyEcEllipticCurve { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebKeyEcEllipticCurve") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"P-256 Curve", + "const": "P-256", + }), + // --- + schemars::json_schema!({ + "description": r"P-384 Curve", + "const": "P-384", + }), + // --- + schemars::json_schema!({ + "description": r"P-521 Curve", + "const": "P-521", + }), + // --- + schemars::json_schema!({ + "description": r"SECG secp256k1 curve", + "const": "secp256k1", + }), + ]; + + let description = r"JSON Web Key EC Elliptic Curve"; + schemars::json_schema!({ + 
"description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Key OKP Elliptic Curve +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebKeyOkpEllipticCurve { + /// Ed25519 signature algorithm key pairs + Ed25519, + + /// Ed448 signature algorithm key pairs + Ed448, + + /// X25519 function key pairs + X25519, + + /// X448 function key pairs + X448, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebKeyOkpEllipticCurve { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Ed25519 => write!(f, "Ed25519"), + Self::Ed448 => write!(f, "Ed448"), + Self::X25519 => write!(f, "X25519"), + Self::X448 => write!(f, "X448"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebKeyOkpEllipticCurve { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "Ed25519" => Ok(Self::Ed25519), + "Ed448" => Ok(Self::Ed448), + "X25519" => Ok(Self::X25519), + "X448" => Ok(Self::X448), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebKeyOkpEllipticCurve { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebKeyOkpEllipticCurve { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebKeyOkpEllipticCurve { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebKeyOkpEllipticCurve") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ 
+ "description": r"Ed25519 signature algorithm key pairs", + "const": "Ed25519", + }), + // --- + schemars::json_schema!({ + "description": r"Ed448 signature algorithm key pairs", + "const": "Ed448", + }), + // --- + schemars::json_schema!({ + "description": r"X25519 function key pairs", + "const": "X25519", + }), + // --- + schemars::json_schema!({ + "description": r"X448 function key pairs", + "const": "X448", + }), + ]; + + let description = r"JSON Web Key OKP Elliptic Curve"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Key Use +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebKeyUse { + /// Digital Signature or MAC + Sig, + + /// Encryption + Enc, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for JsonWebKeyUse { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Sig => write!(f, "sig"), + Self::Enc => write!(f, "enc"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebKeyUse { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "sig" => Ok(Self::Sig), + "enc" => Ok(Self::Enc), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebKeyUse { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebKeyUse { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebKeyUse { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebKeyUse") + } + + #[allow(clippy::too_many_lines)] + fn 
json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"Digital Signature or MAC", + "const": "sig", + }), + // --- + schemars::json_schema!({ + "description": r"Encryption", + "const": "enc", + }), + ]; + + let description = r"JSON Web Key Use"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// JSON Web Key Operation +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum JsonWebKeyOperation { + /// Compute digital signature or MAC + Sign, + + /// Verify digital signature or MAC + Verify, + + /// Encrypt content + Encrypt, + + /// Decrypt content and validate decryption, if applicable + Decrypt, + + /// Encrypt key + WrapKey, + + /// Decrypt key and validate decryption, if applicable + UnwrapKey, + + /// Derive key + DeriveKey, + + /// Derive bits not to be used as a key + DeriveBits, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for JsonWebKeyOperation { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Sign => write!(f, "sign"), + Self::Verify => write!(f, "verify"), + Self::Encrypt => write!(f, "encrypt"), + Self::Decrypt => write!(f, "decrypt"), + Self::WrapKey => write!(f, "wrapKey"), + Self::UnwrapKey => write!(f, "unwrapKey"), + Self::DeriveKey => write!(f, "deriveKey"), + Self::DeriveBits => write!(f, "deriveBits"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for JsonWebKeyOperation { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "sign" => Ok(Self::Sign), + "verify" => Ok(Self::Verify), + "encrypt" => Ok(Self::Encrypt), + "decrypt" => Ok(Self::Decrypt), + "wrapKey" => Ok(Self::WrapKey), + "unwrapKey" => Ok(Self::UnwrapKey), + "deriveKey" => Ok(Self::DeriveKey), + "deriveBits" => Ok(Self::DeriveBits), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for JsonWebKeyOperation { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for JsonWebKeyOperation { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for JsonWebKeyOperation { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("JsonWebKeyOperation") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "description": r"Compute digital signature or MAC", + "const": "sign", + }), + // --- + schemars::json_schema!({ + "description": r"Verify digital signature 
or MAC", + "const": "verify", + }), + // --- + schemars::json_schema!({ + "description": r"Encrypt content", + "const": "encrypt", + }), + // --- + schemars::json_schema!({ + "description": r"Decrypt content and validate decryption, if applicable", + "const": "decrypt", + }), + // --- + schemars::json_schema!({ + "description": r"Encrypt key", + "const": "wrapKey", + }), + // --- + schemars::json_schema!({ + "description": r"Decrypt key and validate decryption, if applicable", + "const": "unwrapKey", + }), + // --- + schemars::json_schema!({ + "description": r"Derive key", + "const": "deriveKey", + }), + // --- + schemars::json_schema!({ + "description": r"Derive bits not to be used as a key", + "const": "deriveBits", + }), + ]; + + let description = r"JSON Web Key Operation"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} diff --git a/matrix-authentication-service/crates/iana/src/lib.rs b/matrix-authentication-service/crates/iana/src/lib.rs new file mode 100644 index 00000000..657e7375 --- /dev/null +++ b/matrix-authentication-service/crates/iana/src/lib.rs @@ -0,0 +1,38 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Values from IANA registries, generated by the `mas-iana-codegen` crate + +#![deny(missing_docs)] +#![allow(clippy::module_name_repetitions)] + +pub mod jose; +pub mod oauth; + +/// An error that occurred while parsing a value from a string. 
+pub struct ParseError { + _private: (), +} + +impl ParseError { + fn new() -> Self { + Self { _private: () } + } +} + +impl core::fmt::Debug for ParseError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_str("ParseError") + } +} + +impl core::fmt::Display for ParseError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_str("Parse error") + } +} + +impl std::error::Error for ParseError {} diff --git a/matrix-authentication-service/crates/iana/src/oauth.rs b/matrix-authentication-service/crates/iana/src/oauth.rs new file mode 100644 index 00000000..f59cd7c8 --- /dev/null +++ b/matrix-authentication-service/crates/iana/src/oauth.rs @@ -0,0 +1,548 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![allow(clippy::doc_markdown)] + +//! Enums from the "OAuth Parameters" IANA registry +//! See + +// Do not edit this file manually + +/// OAuth Access Token Type +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum OAuthAccessTokenType { + /// `Bearer` + Bearer, + + /// `N_A` + Na, + + /// `PoP` + PoP, + + /// `DPoP` + DPoP, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for OAuthAccessTokenType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Bearer => write!(f, "Bearer"), + Self::Na => write!(f, "N_A"), + Self::PoP => write!(f, "PoP"), + Self::DPoP => write!(f, "DPoP"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for OAuthAccessTokenType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "Bearer" => Ok(Self::Bearer), + "N_A" => Ok(Self::Na), + "PoP" => Ok(Self::PoP), + "DPoP" => Ok(Self::DPoP), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for OAuthAccessTokenType { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for OAuthAccessTokenType { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for OAuthAccessTokenType { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("OAuthAccessTokenType") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "const": "Bearer", + }), + // --- + schemars::json_schema!({ + "const": "N_A", + }), + // --- + schemars::json_schema!({ + "const": "PoP", + }), + // --- + schemars::json_schema!({ + "const": "DPoP", + }), + ]; + + let description = r"OAuth Access Token Type"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// OAuth Authorization Endpoint Response Type +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum 
OAuthAuthorizationEndpointResponseType { + /// `code` + Code, + + /// `code id_token` + CodeIdToken, + + /// `code id_token token` + CodeIdTokenToken, + + /// `code token` + CodeToken, + + /// `id_token` + IdToken, + + /// `id_token token` + IdTokenToken, + + /// `none` + None, + + /// `token` + Token, +} + +impl core::fmt::Display for OAuthAuthorizationEndpointResponseType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Code => write!(f, "code"), + Self::CodeIdToken => write!(f, "code id_token"), + Self::CodeIdTokenToken => write!(f, "code id_token token"), + Self::CodeToken => write!(f, "code token"), + Self::IdToken => write!(f, "id_token"), + Self::IdTokenToken => write!(f, "id_token token"), + Self::None => write!(f, "none"), + Self::Token => write!(f, "token"), + } + } +} + +impl core::str::FromStr for OAuthAuthorizationEndpointResponseType { + type Err = crate::ParseError; + + fn from_str(s: &str) -> Result { + match s { + "code" => Ok(Self::Code), + "code id_token" => Ok(Self::CodeIdToken), + "code id_token token" => Ok(Self::CodeIdTokenToken), + "code token" => Ok(Self::CodeToken), + "id_token" => Ok(Self::IdToken), + "id_token token" => Ok(Self::IdTokenToken), + "none" => Ok(Self::None), + "token" => Ok(Self::Token), + _ => Err(crate::ParseError::new()), + } + } +} + +impl<'de> serde::Deserialize<'de> for OAuthAuthorizationEndpointResponseType { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for OAuthAuthorizationEndpointResponseType { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for OAuthAuthorizationEndpointResponseType { + fn schema_name() -> std::borrow::Cow<'static, str> { + 
std::borrow::Cow::Borrowed("OAuthAuthorizationEndpointResponseType") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "const": "code", + }), + // --- + schemars::json_schema!({ + "const": "code id_token", + }), + // --- + schemars::json_schema!({ + "const": "code id_token token", + }), + // --- + schemars::json_schema!({ + "const": "code token", + }), + // --- + schemars::json_schema!({ + "const": "id_token", + }), + // --- + schemars::json_schema!({ + "const": "id_token token", + }), + // --- + schemars::json_schema!({ + "const": "none", + }), + // --- + schemars::json_schema!({ + "const": "token", + }), + ]; + + let description = r"OAuth Authorization Endpoint Response Type"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// OAuth Token Type Hint +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum OAuthTokenTypeHint { + /// `access_token` + AccessToken, + + /// `refresh_token` + RefreshToken, + + /// `pct` + Pct, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for OAuthTokenTypeHint { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::AccessToken => write!(f, "access_token"), + Self::RefreshToken => write!(f, "refresh_token"), + Self::Pct => write!(f, "pct"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for OAuthTokenTypeHint { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "access_token" => Ok(Self::AccessToken), + "refresh_token" => Ok(Self::RefreshToken), + "pct" => Ok(Self::Pct), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for OAuthTokenTypeHint { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for OAuthTokenTypeHint { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for OAuthTokenTypeHint { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("OAuthTokenTypeHint") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "const": "access_token", + }), + // --- + schemars::json_schema!({ + "const": "refresh_token", + }), + // --- + schemars::json_schema!({ + "const": "pct", + }), + ]; + + let description = r"OAuth Token Type Hint"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// OAuth Token Endpoint Authentication Method +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum OAuthClientAuthenticationMethod { + /// `none` + None, + + /// 
`client_secret_post` + ClientSecretPost, + + /// `client_secret_basic` + ClientSecretBasic, + + /// `client_secret_jwt` + ClientSecretJwt, + + /// `private_key_jwt` + PrivateKeyJwt, + + /// `tls_client_auth` + TlsClientAuth, + + /// `self_signed_tls_client_auth` + SelfSignedTlsClientAuth, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for OAuthClientAuthenticationMethod { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::None => write!(f, "none"), + Self::ClientSecretPost => write!(f, "client_secret_post"), + Self::ClientSecretBasic => write!(f, "client_secret_basic"), + Self::ClientSecretJwt => write!(f, "client_secret_jwt"), + Self::PrivateKeyJwt => write!(f, "private_key_jwt"), + Self::TlsClientAuth => write!(f, "tls_client_auth"), + Self::SelfSignedTlsClientAuth => write!(f, "self_signed_tls_client_auth"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for OAuthClientAuthenticationMethod { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "none" => Ok(Self::None), + "client_secret_post" => Ok(Self::ClientSecretPost), + "client_secret_basic" => Ok(Self::ClientSecretBasic), + "client_secret_jwt" => Ok(Self::ClientSecretJwt), + "private_key_jwt" => Ok(Self::PrivateKeyJwt), + "tls_client_auth" => Ok(Self::TlsClientAuth), + "self_signed_tls_client_auth" => Ok(Self::SelfSignedTlsClientAuth), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for OAuthClientAuthenticationMethod { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for OAuthClientAuthenticationMethod { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + 
serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for OAuthClientAuthenticationMethod { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("OAuthClientAuthenticationMethod") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "const": "none", + }), + // --- + schemars::json_schema!({ + "const": "client_secret_post", + }), + // --- + schemars::json_schema!({ + "const": "client_secret_basic", + }), + // --- + schemars::json_schema!({ + "const": "client_secret_jwt", + }), + // --- + schemars::json_schema!({ + "const": "private_key_jwt", + }), + // --- + schemars::json_schema!({ + "const": "tls_client_auth", + }), + // --- + schemars::json_schema!({ + "const": "self_signed_tls_client_auth", + }), + ]; + + let description = r"OAuth Token Endpoint Authentication Method"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} + +/// PKCE Code Challenge Method +/// +/// Source: +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[non_exhaustive] +pub enum PkceCodeChallengeMethod { + /// `plain` + Plain, + + /// `S256` + S256, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for PkceCodeChallengeMethod { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Plain => write!(f, "plain"), + Self::S256 => write!(f, "S256"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for PkceCodeChallengeMethod { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "plain" => Ok(Self::Plain), + "S256" => Ok(Self::S256), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +impl<'de> serde::Deserialize<'de> for PkceCodeChallengeMethod { + fn deserialize(deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + core::str::FromStr::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for PkceCodeChallengeMethod { + fn serialize(&self, serializer: S) -> Result + where + S: serde::ser::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl schemars::JsonSchema for PkceCodeChallengeMethod { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("PkceCodeChallengeMethod") + } + + #[allow(clippy::too_many_lines)] + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + let enums = vec![ + // --- + schemars::json_schema!({ + "const": "plain", + }), + // --- + schemars::json_schema!({ + "const": "S256", + }), + ]; + + let description = r"PKCE Code Challenge Method"; + schemars::json_schema!({ + "description": description, + "anyOf": enums, + }) + } +} diff --git a/matrix-authentication-service/crates/jose/Cargo.toml b/matrix-authentication-service/crates/jose/Cargo.toml new file mode 100644 index 00000000..ac9c4d96 --- /dev/null +++ b/matrix-authentication-service/crates/jose/Cargo.toml @@ -0,0 +1,47 @@ +# Copyright 2025 New Vector Ltd. 
+# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-jose" +description = "JSON Object Signing and Encryption (JWT & co) utilities" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +base64ct.workspace = true +chrono.workspace = true +digest.workspace = true +ecdsa.workspace = true +elliptic-curve.workspace = true +generic-array.workspace = true +hmac.workspace = true +k256.workspace = true +p256.workspace = true +p384.workspace = true +rand.workspace = true +rsa.workspace = true +schemars.workspace = true +sec1.workspace = true +serde_json.workspace = true +serde_with.workspace = true +serde.workspace = true +sha2.workspace = true +signature.workspace = true +thiserror.workspace = true +url.workspace = true + +mas-iana.workspace = true + +[dev-dependencies] +insta.workspace = true +rand_chacha.workspace = true diff --git a/matrix-authentication-service/crates/jose/src/base64.rs b/matrix-authentication-service/crates/jose/src/base64.rs new file mode 100644 index 00000000..5886368a --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/base64.rs @@ -0,0 +1,177 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Transparent base64 encoding / decoding as part of (de)serialization. + +use std::{borrow::Cow, fmt, marker::PhantomData, str}; + +use base64ct::Encoding; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{self, Unexpected, Visitor}, +}; + +/// A wrapper around `Vec` that (de)serializes from / to a base64 string. +/// +/// The generic parameter `C` represents the base64 flavor. 
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct Base64 { + bytes: Vec, + // Invariant PhantomData, Send + Sync + _phantom_conf: PhantomData C>, +} + +pub type Base64UrlNoPad = Base64; + +impl Base64 { + /// Create a `Base64` instance from raw bytes, to be base64-encoded in + /// serialization. + #[must_use] + pub fn new(bytes: Vec) -> Self { + Self { + bytes, + _phantom_conf: PhantomData, + } + } + + /// Get a reference to the raw bytes held by this `Base64` instance. + #[must_use] + pub fn as_bytes(&self) -> &[u8] { + self.bytes.as_ref() + } + + /// Encode the bytes contained in this `Base64` instance to unpadded base64. + #[must_use] + pub fn encode(&self) -> String { + C::encode_string(self.as_bytes()) + } + + /// Get the raw bytes held by this `Base64` instance. + #[must_use] + pub fn into_inner(self) -> Vec { + self.bytes + } + + /// Create a `Base64` instance containing an empty `Vec`. + #[must_use] + pub fn empty() -> Self { + Self::new(Vec::new()) + } + + /// Parse some base64-encoded data to create a `Base64` instance. + /// + /// # Errors + /// + /// Returns an error if the input is not valid base64. + pub fn parse(encoded: &str) -> Result { + C::decode_vec(encoded).map(Self::new) + } +} + +impl fmt::Debug for Base64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.encode().fmt(f) + } +} + +impl fmt::Display for Base64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.encode().fmt(f) + } +} + +impl<'de, C: Encoding> Deserialize<'de> for Base64 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let encoded = deserialize_cow_str(deserializer)?; + Self::parse(&encoded).map_err(de::Error::custom) + } +} + +impl Serialize for Base64 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.encode()) + } +} + +/// Deserialize a `Cow<'de, str>`. 
+/// +/// Different from serde's implementation of `Deserialize` for `Cow` since it +/// borrows from the input when possible. +pub fn deserialize_cow_str<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + deserializer.deserialize_string(CowStrVisitor) +} + +struct CowStrVisitor; + +impl<'de> Visitor<'de> for CowStrVisitor { + type Value = Cow<'de, str>; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("a string") + } + + fn visit_borrowed_str(self, v: &'de str) -> Result + where + E: de::Error, + { + Ok(Cow::Borrowed(v)) + } + + fn visit_borrowed_bytes(self, v: &'de [u8]) -> Result + where + E: de::Error, + { + match str::from_utf8(v) { + Ok(s) => Ok(Cow::Borrowed(s)), + Err(_) => Err(de::Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + Ok(Cow::Owned(v.to_owned())) + } + + fn visit_string(self, v: String) -> Result + where + E: de::Error, + { + Ok(Cow::Owned(v)) + } + + fn visit_bytes(self, v: &[u8]) -> Result + where + E: de::Error, + { + match str::from_utf8(v) { + Ok(s) => Ok(Cow::Owned(s.to_owned())), + Err(_) => Err(de::Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_byte_buf(self, v: Vec) -> Result + where + E: de::Error, + { + match String::from_utf8(v) { + Ok(s) => Ok(Cow::Owned(s)), + Err(e) => Err(de::Error::invalid_value( + Unexpected::Bytes(&e.into_bytes()), + &self, + )), + } + } +} diff --git a/matrix-authentication-service/crates/jose/src/claims.rs b/matrix-authentication-service/crates/jose/src/claims.rs new file mode 100644 index 00000000..3ab6e26e --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/claims.rs @@ -0,0 +1,879 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, convert::Infallible, marker::PhantomData, ops::Deref}; + +use base64ct::{Base64UrlUnpadded, Encoding}; +use mas_iana::jose::JsonWebSignatureAlg; +use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use sha2::{Digest, Sha256, Sha384, Sha512}; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum ClaimError { + #[error("missing claim {0:?}")] + MissingClaim(&'static str), + + #[error("invalid claim {0:?}")] + InvalidClaim(&'static str), + + #[error("could not validate claim {claim:?}")] + ValidationError { + claim: &'static str, + #[source] + source: Box, + }, +} + +pub trait Validator { + /// The associated error type returned by this validator. + type Error; + + /// Validate a claim value + /// + /// # Errors + /// + /// Returns an error if the value is invalid. + fn validate(&self, value: &T) -> Result<(), Self::Error>; +} + +impl Validator for () { + type Error = Infallible; + + fn validate(&self, _value: &T) -> Result<(), Self::Error> { + Ok(()) + } +} + +pub struct Claim { + value: &'static str, + t: PhantomData, + v: PhantomData, +} + +impl Claim +where + V: Validator, +{ + #[must_use] + pub const fn new(claim: &'static str) -> Self { + Self { + value: claim, + t: PhantomData, + v: PhantomData, + } + } + + /// Insert a claim into the given claims map. + /// + /// # Errors + /// + /// Returns an error if the value failed to serialize. + pub fn insert( + &self, + claims: &mut HashMap, + value: I, + ) -> Result<(), ClaimError> + where + I: Into, + T: Serialize, + { + let value = value.into(); + let value: serde_json::Value = + serde_json::to_value(&value).map_err(|_| ClaimError::InvalidClaim(self.value))?; + claims.insert(self.value.to_owned(), value); + + Ok(()) + } + + /// Extract a claim from the given claims map. 
+ /// + /// # Errors + /// + /// Returns an error if the value failed to deserialize, if its value is + /// invalid or if the claim is missing. + pub fn extract_required( + &self, + claims: &mut HashMap, + ) -> Result + where + T: DeserializeOwned, + V: Default, + V::Error: std::error::Error + Send + Sync + 'static, + { + let validator = V::default(); + self.extract_required_with_options(claims, validator) + } + + /// Extract a claim from the given claims map, with the given options. + /// + /// # Errors + /// + /// Returns an error if the value failed to deserialize, if its value is + /// invalid or if the claim is missing. + pub fn extract_required_with_options( + &self, + claims: &mut HashMap, + validator: I, + ) -> Result + where + T: DeserializeOwned, + I: Into, + V::Error: std::error::Error + Send + Sync + 'static, + { + let validator: V = validator.into(); + let claim = claims + .remove(self.value) + .ok_or(ClaimError::MissingClaim(self.value))?; + + let res = + serde_json::from_value(claim).map_err(|_| ClaimError::InvalidClaim(self.value))?; + validator + .validate(&res) + .map_err(|source| ClaimError::ValidationError { + claim: self.value, + source: Box::new(source), + })?; + Ok(res) + } + + /// Extract a claim from the given claims map, if it exists. + /// + /// # Errors + /// + /// Returns an error if the value failed to deserialize or if its value is + /// invalid. + pub fn extract_optional( + &self, + claims: &mut HashMap, + ) -> Result, ClaimError> + where + T: DeserializeOwned, + V: Default, + V::Error: std::error::Error + Send + Sync + 'static, + { + let validator = V::default(); + self.extract_optional_with_options(claims, validator) + } + + /// Extract a claim from the given claims map, if it exists, with the given + /// options. + /// + /// # Errors + /// + /// Returns an error if the value failed to deserialize or if its value is + /// invalid. 
+ pub fn extract_optional_with_options( + &self, + claims: &mut HashMap, + validator: I, + ) -> Result, ClaimError> + where + T: DeserializeOwned, + I: Into, + V::Error: std::error::Error + Send + Sync + 'static, + { + match self.extract_required_with_options(claims, validator) { + Ok(v) => Ok(Some(v)), + Err(ClaimError::MissingClaim(_)) => Ok(None), + Err(e) => Err(e), + } + } + + /// Assert that the claim is absent. + /// + /// # Errors + /// + /// Returns an error if the claim is present. + pub fn assert_absent( + &self, + claims: &HashMap, + ) -> Result<(), ClaimError> { + if claims.contains_key(self.value) { + Err(ClaimError::InvalidClaim(self.value)) + } else { + Ok(()) + } + } +} + +#[derive(Debug, Clone)] +pub struct TimeOptions { + when: chrono::DateTime, + leeway: chrono::Duration, +} + +impl TimeOptions { + #[must_use] + pub fn new(when: chrono::DateTime) -> Self { + Self { + when, + leeway: chrono::Duration::microseconds(5 * 60 * 1000 * 1000), + } + } + + #[must_use] + pub fn leeway(mut self, leeway: chrono::Duration) -> Self { + self.leeway = leeway; + self + } +} + +#[derive(Debug, Clone, Copy, Error)] +#[error("Current time is too far away")] +pub struct TimeTooFarError; + +#[derive(Debug, Clone)] +pub struct TimeNotAfter(TimeOptions); + +impl Validator for TimeNotAfter { + type Error = TimeTooFarError; + fn validate(&self, value: &Timestamp) -> Result<(), Self::Error> { + if self.0.when <= value.0 + self.0.leeway { + Ok(()) + } else { + Err(TimeTooFarError) + } + } +} + +impl From for TimeNotAfter { + fn from(opt: TimeOptions) -> Self { + Self(opt) + } +} + +impl From<&TimeOptions> for TimeNotAfter { + fn from(opt: &TimeOptions) -> Self { + opt.clone().into() + } +} + +#[derive(Debug, Clone)] +pub struct TimeNotBefore(TimeOptions); + +impl Validator for TimeNotBefore { + type Error = TimeTooFarError; + + fn validate(&self, value: &Timestamp) -> Result<(), Self::Error> { + if self.0.when >= value.0 - self.0.leeway { + Ok(()) + } else { + 
Err(TimeTooFarError) + } + } +} + +impl From for TimeNotBefore { + fn from(opt: TimeOptions) -> Self { + Self(opt) + } +} + +impl From<&TimeOptions> for TimeNotBefore { + fn from(opt: &TimeOptions) -> Self { + opt.clone().into() + } +} + +/// Hash the given token with the given algorithm for an ID Token claim. +/// +/// According to the [OpenID Connect Core 1.0 specification]. +/// +/// # Errors +/// +/// Returns an error if the algorithm is not supported. +/// +/// [OpenID Connect Core 1.0 specification]: https://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken +pub fn hash_token(alg: &JsonWebSignatureAlg, token: &str) -> Result { + let bits = match alg { + JsonWebSignatureAlg::Hs256 + | JsonWebSignatureAlg::Rs256 + | JsonWebSignatureAlg::Es256 + | JsonWebSignatureAlg::Ps256 + | JsonWebSignatureAlg::Es256K => { + let mut hasher = Sha256::new(); + hasher.update(token); + let hash: [u8; 32] = hasher.finalize().into(); + // Left-most half + hash[..16].to_owned() + } + JsonWebSignatureAlg::Hs384 + | JsonWebSignatureAlg::Rs384 + | JsonWebSignatureAlg::Es384 + | JsonWebSignatureAlg::Ps384 => { + let mut hasher = Sha384::new(); + hasher.update(token); + let hash: [u8; 48] = hasher.finalize().into(); + // Left-most half + hash[..24].to_owned() + } + JsonWebSignatureAlg::Hs512 + | JsonWebSignatureAlg::Rs512 + | JsonWebSignatureAlg::Es512 + | JsonWebSignatureAlg::Ps512 => { + let mut hasher = Sha512::new(); + hasher.update(token); + let hash: [u8; 64] = hasher.finalize().into(); + // Left-most half + hash[..32].to_owned() + } + _ => return Err(TokenHashError::UnsupportedAlgorithm), + }; + + Ok(Base64UrlUnpadded::encode_string(&bits)) +} + +#[derive(Debug, Clone, Copy, Error)] +pub enum TokenHashError { + #[error("Hashes don't match")] + HashMismatch, + + #[error("Unsupported algorithm for hashing")] + UnsupportedAlgorithm, +} + +#[derive(Debug, Clone)] +pub struct TokenHash<'a> { + alg: &'a JsonWebSignatureAlg, + token: &'a str, +} + +impl<'a> TokenHash<'a> { + /// 
Creates a new `TokenHash` validator for the given algorithm and token. + #[must_use] + pub fn new(alg: &'a JsonWebSignatureAlg, token: &'a str) -> Self { + Self { alg, token } + } +} + +impl Validator for TokenHash<'_> { + type Error = TokenHashError; + fn validate(&self, value: &String) -> Result<(), Self::Error> { + if hash_token(self.alg, self.token)? == *value { + Ok(()) + } else { + Err(TokenHashError::HashMismatch) + } + } +} + +#[derive(Debug, Clone, Copy, Error)] +#[error("Values don't match")] +pub struct EqualityError; + +#[derive(Debug, Clone)] +pub struct Equality<'a, T: ?Sized> { + value: &'a T, +} + +impl<'a, T: ?Sized> Equality<'a, T> { + /// Creates a new `Equality` validator for the given value. + #[must_use] + pub fn new(value: &'a T) -> Self { + Self { value } + } +} + +impl Validator for Equality<'_, T2> +where + T2: PartialEq + ?Sized, +{ + type Error = EqualityError; + fn validate(&self, value: &T1) -> Result<(), Self::Error> { + if *self.value == *value { + Ok(()) + } else { + Err(EqualityError) + } + } +} + +impl<'a, T: ?Sized> From<&'a T> for Equality<'a, T> { + fn from(value: &'a T) -> Self { + Self::new(value) + } +} + +#[derive(Debug, Clone)] +pub struct Contains<'a, T> { + value: &'a T, +} + +impl<'a, T> Contains<'a, T> { + /// Creates a new `Contains` validator for the given value. 
+ #[must_use] + pub fn new(value: &'a T) -> Self { + Self { value } + } +} + +#[derive(Debug, Clone, Copy, Error)] +#[error("OneOrMany doesn't contain value")] +pub struct ContainsError; + +impl Validator> for Contains<'_, T> +where + T: PartialEq, +{ + type Error = ContainsError; + fn validate(&self, value: &OneOrMany) -> Result<(), Self::Error> { + if value.contains(self.value) { + Ok(()) + } else { + Err(ContainsError) + } + } +} + +impl<'a, T> From<&'a T> for Contains<'a, T> { + fn from(value: &'a T) -> Self { + Self::new(value) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(transparent)] +pub struct Timestamp(#[serde(with = "chrono::serde::ts_seconds")] chrono::DateTime); + +impl Deref for Timestamp { + type Target = chrono::DateTime; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for Timestamp { + fn from(value: chrono::DateTime) -> Self { + Timestamp(value) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde( + transparent, + bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'de>") +)] +pub struct OneOrMany( + // serde_as seems to not work properly with #[serde(transparent)] + // We have use plain old #[serde(with = ...)] with serde_with's utilities, which is a bit + // verbose but works + #[serde( + with = "serde_with::As::>" + )] + Vec, +); + +impl Deref for OneOrMany { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for OneOrMany { + fn from(value: Vec) -> Self { + Self(value) + } +} + +impl From for OneOrMany { + fn from(value: T) -> Self { + Self(vec![value]) + } +} + +/// Claims defined in RFC7519 sec. 
4.1 +/// +mod rfc7519 { + use super::{Claim, Contains, Equality, OneOrMany, TimeNotAfter, TimeNotBefore, Timestamp}; + + pub const ISS: Claim> = Claim::new("iss"); + pub const SUB: Claim = Claim::new("sub"); + pub const AUD: Claim, Contains> = Claim::new("aud"); + pub const NBF: Claim = Claim::new("nbf"); + pub const EXP: Claim = Claim::new("exp"); + pub const IAT: Claim = Claim::new("iat"); + pub const JTI: Claim = Claim::new("jti"); +} + +/// Claims defined in OIDC.Core sec. 2 and sec. 5.1 +/// +/// +mod oidc_core { + use url::Url; + + use super::{Claim, Equality, Timestamp, TokenHash}; + + pub const AUTH_TIME: Claim = Claim::new("auth_time"); + pub const NONCE: Claim> = Claim::new("nonce"); + pub const AT_HASH: Claim = Claim::new("at_hash"); + pub const C_HASH: Claim = Claim::new("c_hash"); + + pub const NAME: Claim = Claim::new("name"); + pub const GIVEN_NAME: Claim = Claim::new("given_name"); + pub const FAMILY_NAME: Claim = Claim::new("family_name"); + pub const MIDDLE_NAME: Claim = Claim::new("middle_name"); + pub const NICKNAME: Claim = Claim::new("nickname"); + pub const PREFERRED_USERNAME: Claim = Claim::new("preferred_username"); + pub const PROFILE: Claim = Claim::new("profile"); + pub const PICTURE: Claim = Claim::new("picture"); + pub const WEBSITE: Claim = Claim::new("website"); + // TODO: email type? 
+ pub const EMAIL: Claim = Claim::new("email"); + pub const EMAIL_VERIFIED: Claim = Claim::new("email_verified"); + pub const GENDER: Claim = Claim::new("gender"); + // TODO: date type + pub const BIRTHDATE: Claim = Claim::new("birthdate"); + // TODO: timezone type + pub const ZONEINFO: Claim = Claim::new("zoneinfo"); + // TODO: locale type + pub const LOCALE: Claim = Claim::new("locale"); + // TODO: phone number type + pub const PHONE_NUMBER: Claim = Claim::new("phone_number"); + pub const PHONE_NUMBER_VERIFIED: Claim = Claim::new("phone_number_verified"); + // TODO: pub const ADDRESS: Claim = Claim::new("address"); + pub const UPDATED_AT: Claim = Claim::new("updated_at"); +} + +/// Claims defined in OpenID.FrontChannel +/// +mod oidc_frontchannel { + use super::Claim; + + pub const SID: Claim = Claim::new("sid"); +} + +pub use self::{oidc_core::*, oidc_frontchannel::*, rfc7519::*}; + +#[cfg(test)] +mod tests { + use chrono::TimeZone; + + use super::*; + + #[test] + fn timestamp_serde() { + let datetime = Timestamp( + chrono::Utc + .with_ymd_and_hms(2018, 1, 18, 1, 30, 22) + .unwrap(), + ); + let timestamp = serde_json::Value::Number(1_516_239_022.into()); + + assert_eq!(datetime, serde_json::from_value(timestamp.clone()).unwrap()); + assert_eq!(timestamp, serde_json::to_value(&datetime).unwrap()); + } + + #[test] + fn one_or_many_serde() { + let one = OneOrMany(vec!["one".to_owned()]); + let many = OneOrMany(vec!["one".to_owned(), "two".to_owned()]); + + assert_eq!( + one, + serde_json::from_value(serde_json::json!("one")).unwrap() + ); + assert_eq!( + one, + serde_json::from_value(serde_json::json!(["one"])).unwrap() + ); + assert_eq!( + many, + serde_json::from_value(serde_json::json!(["one", "two"])).unwrap() + ); + assert_eq!( + serde_json::to_value(&one).unwrap(), + serde_json::json!("one") + ); + assert_eq!( + serde_json::to_value(&many).unwrap(), + serde_json::json!(["one", "two"]) + ); + } + + #[test] + fn extract_claims() { + let now = chrono::Utc + 
.with_ymd_and_hms(2018, 1, 18, 1, 30, 22) + .unwrap(); + let expiration = now + chrono::Duration::microseconds(5 * 60 * 1000 * 1000); + let time_options = TimeOptions::new(now).leeway(chrono::Duration::zero()); + + let claims = serde_json::json!({ + "iss": "https://foo.com", + "sub": "johndoe", + "aud": ["abcd-efgh"], + "iat": 1_516_239_022, + "nbf": 1_516_239_022, + "exp": 1_516_239_322, + "jti": "1122-3344-5566-7788", + }); + let mut claims = serde_json::from_value(claims).unwrap(); + + let iss = ISS + .extract_required_with_options(&mut claims, "https://foo.com") + .unwrap(); + let sub = SUB.extract_optional(&mut claims).unwrap(); + let aud = AUD + .extract_optional_with_options(&mut claims, &"abcd-efgh".to_owned()) + .unwrap(); + let nbf = NBF + .extract_optional_with_options(&mut claims, &time_options) + .unwrap(); + let exp = EXP + .extract_optional_with_options(&mut claims, &time_options) + .unwrap(); + let iat = IAT + .extract_optional_with_options(&mut claims, &time_options) + .unwrap(); + let jti = JTI.extract_optional(&mut claims).unwrap(); + + assert_eq!(iss, "https://foo.com".to_owned()); + assert_eq!(sub, Some("johndoe".to_owned())); + assert_eq!(aud.as_deref(), Some(&vec!["abcd-efgh".to_owned()])); + assert_eq!(iat.as_deref(), Some(&now)); + assert_eq!(nbf.as_deref(), Some(&now)); + assert_eq!(exp.as_deref(), Some(&expiration)); + assert_eq!(jti, Some("1122-3344-5566-7788".to_owned())); + + assert!(claims.is_empty()); + } + + #[test] + fn time_validation() { + let now = chrono::Utc + .with_ymd_and_hms(2018, 1, 18, 1, 30, 22) + .unwrap(); + + let claims = serde_json::json!({ + "iat": 1_516_239_022, + "nbf": 1_516_239_022, + "exp": 1_516_239_322, + }); + let claims: HashMap = serde_json::from_value(claims).unwrap(); + + // Everything should be fine at this point, the claims iat & nbf == now + { + let mut claims = claims.clone(); + + // so no leeway should be fine as well here + let time_options = TimeOptions::new(now).leeway(chrono::Duration::zero()); 
+ assert!( + IAT.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + NBF.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + EXP.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + } + + // Let's go back in time a bit + let now = now - chrono::Duration::microseconds(60 * 1000 * 1000); + + { + // There is now a time variance between the two parties... + let mut claims = claims.clone(); + + // but no time variance is allowed. "iat" and "nbf" validation will fail + let time_options = TimeOptions::new(now).leeway(chrono::Duration::zero()); + assert!(matches!( + IAT.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::ValidationError { claim: "iat", .. }), + )); + assert!(matches!( + NBF.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::ValidationError { claim: "nbf", .. }), + )); + assert!( + EXP.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + } + + { + // This time, there is a two minute leeway, they all should be fine + let mut claims = claims.clone(); + + // but no time variance is allowed. "iat" and "nbf" validation will fail + let time_options = + TimeOptions::new(now).leeway(chrono::Duration::microseconds(2 * 60 * 1000 * 1000)); + assert!( + IAT.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + NBF.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + EXP.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + } + + // Let's wait some time so it expires + let now = now + chrono::Duration::microseconds((1 + 6) * 60 * 1000 * 1000); + + { + // At this point, the claims expired one minute ago + let mut claims = claims.clone(); + + // but no time variance is allowed. 
"exp" validation will fail + let time_options = TimeOptions::new(now).leeway(chrono::Duration::zero()); + assert!( + IAT.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + NBF.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!(matches!( + EXP.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::ValidationError { claim: "exp", .. }), + )); + } + + { + let mut claims = claims; + + // Same, but with a 2 minutes leeway should be fine then + let time_options = + TimeOptions::new(now).leeway(chrono::Duration::try_minutes(2).unwrap()); + assert!( + IAT.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + NBF.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + assert!( + EXP.extract_required_with_options(&mut claims, &time_options) + .is_ok() + ); + } + } + + #[test] + fn invalid_claims() { + let now = chrono::Utc + .with_ymd_and_hms(2018, 1, 18, 1, 30, 22) + .unwrap(); + let time_options = TimeOptions::new(now).leeway(chrono::Duration::zero()); + + let claims = serde_json::json!({ + "iss": 123, + "sub": 456, + "aud": 789, + "iat": "123", + "nbf": "456", + "exp": "789", + "jti": 123, + }); + let mut claims = serde_json::from_value(claims).unwrap(); + + assert!(matches!( + ISS.extract_required_with_options(&mut claims, "https://foo.com"), + Err(ClaimError::InvalidClaim("iss")) + )); + assert!(matches!( + SUB.extract_required(&mut claims), + Err(ClaimError::InvalidClaim("sub")) + )); + assert!(matches!( + AUD.extract_required_with_options(&mut claims, &"abcd-efgh".to_owned()), + Err(ClaimError::InvalidClaim("aud")) + )); + assert!(matches!( + NBF.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::InvalidClaim("nbf")) + )); + assert!(matches!( + EXP.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::InvalidClaim("exp")) + )); + assert!(matches!( + 
IAT.extract_required_with_options(&mut claims, &time_options), + Err(ClaimError::InvalidClaim("iat")) + )); + assert!(matches!( + JTI.extract_required(&mut claims), + Err(ClaimError::InvalidClaim("jti")) + )); + } + + #[test] + fn missing_claims() { + // Empty claim set + let mut claims = HashMap::new(); + + assert!(matches!( + ISS.extract_required_with_options(&mut claims, "https://foo.com"), + Err(ClaimError::MissingClaim("iss")) + )); + assert!(matches!( + SUB.extract_required(&mut claims), + Err(ClaimError::MissingClaim("sub")) + )); + assert!(matches!( + AUD.extract_required_with_options(&mut claims, &"abcd-efgh".to_owned()), + Err(ClaimError::MissingClaim("aud")) + )); + + assert!(matches!( + ISS.extract_optional_with_options(&mut claims, "https://foo.com"), + Ok(None) + )); + assert!(matches!(SUB.extract_optional(&mut claims), Ok(None))); + assert!(matches!( + AUD.extract_optional_with_options(&mut claims, &"abcd-efgh".to_owned()), + Ok(None) + )); + } + + #[test] + fn string_eq_validation() { + let claims = serde_json::json!({ + "iss": "https://foo.com", + }); + let mut claims: HashMap = + serde_json::from_value(claims).unwrap(); + + ISS.extract_required_with_options(&mut claims.clone(), "https://foo.com") + .unwrap(); + + assert!(matches!( + ISS.extract_required_with_options(&mut claims, "https://bar.com"), + Err(ClaimError::ValidationError { claim: "iss", .. }), + )); + } + + #[test] + fn contains_validation() { + let claims = serde_json::json!({ + "aud": "abcd-efgh", + }); + let mut claims: HashMap = + serde_json::from_value(claims).unwrap(); + + AUD.extract_required_with_options(&mut claims.clone(), &"abcd-efgh".to_owned()) + .unwrap(); + + assert!(matches!( + AUD.extract_required_with_options(&mut claims, &"wxyz".to_owned()), + Err(ClaimError::ValidationError { claim: "aud", .. 
}), + )); + } +} diff --git a/matrix-authentication-service/crates/jose/src/constraints.rs b/matrix-authentication-service/crates/jose/src/constraints.rs new file mode 100644 index 00000000..16859c88 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/constraints.rs @@ -0,0 +1,252 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashSet; + +use mas_iana::jose::{JsonWebKeyType, JsonWebKeyUse, JsonWebSignatureAlg}; + +use crate::jwt::JsonWebSignatureHeader; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Constraint<'a> { + Alg { + constraint_alg: &'a JsonWebSignatureAlg, + }, + + Algs { + constraint_algs: &'a [JsonWebSignatureAlg], + }, + + Kid { + constraint_kid: &'a str, + }, + + Use { + constraint_use: &'a JsonWebKeyUse, + }, + + Kty { + constraint_kty: &'a JsonWebKeyType, + }, +} + +impl<'a> Constraint<'a> { + #[must_use] + pub fn alg(constraint_alg: &'a JsonWebSignatureAlg) -> Self { + Constraint::Alg { constraint_alg } + } + + #[must_use] + pub fn algs(constraint_algs: &'a [JsonWebSignatureAlg]) -> Self { + Constraint::Algs { constraint_algs } + } + + #[must_use] + pub fn kid(constraint_kid: &'a str) -> Self { + Constraint::Kid { constraint_kid } + } + + #[must_use] + pub fn use_(constraint_use: &'a JsonWebKeyUse) -> Self { + Constraint::Use { constraint_use } + } + + #[must_use] + pub fn kty(constraint_kty: &'a JsonWebKeyType) -> Self { + Constraint::Kty { constraint_kty } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ConstraintDecision { + Positive, + Neutral, + Negative, +} + +pub trait Constrainable { + fn alg(&self) -> Option<&JsonWebSignatureAlg> { + None + } + + /// List of available algorithms for this key + fn algs(&self) -> &[JsonWebSignatureAlg] { + &[] + } + + /// Key ID (`kid`) 
of this key + fn kid(&self) -> Option<&str> { + None + } + + /// Usage specified for this key + fn use_(&self) -> Option<&JsonWebKeyUse> { + None + } + + /// Key type (`kty`) of this key + fn kty(&self) -> JsonWebKeyType; +} + +impl Constraint<'_> { + fn decide(&self, constrainable: &T) -> ConstraintDecision { + match self { + Constraint::Alg { constraint_alg } => { + // If the constrainable has one specific alg defined, use that + if let Some(alg) = constrainable.alg() { + if alg == *constraint_alg { + ConstraintDecision::Positive + } else { + ConstraintDecision::Negative + } + // If not, check that the requested alg is valid for this + // constrainable + } else if constrainable.algs().contains(constraint_alg) { + ConstraintDecision::Neutral + } else { + ConstraintDecision::Negative + } + } + Constraint::Algs { constraint_algs } => { + if let Some(alg) = constrainable.alg() { + if constraint_algs.contains(alg) { + ConstraintDecision::Positive + } else { + ConstraintDecision::Negative + } + } else if constrainable + .algs() + .iter() + .any(|alg| constraint_algs.contains(alg)) + { + ConstraintDecision::Neutral + } else { + ConstraintDecision::Negative + } + } + Constraint::Kid { constraint_kid } => { + if let Some(kid) = constrainable.kid() { + if kid == *constraint_kid { + ConstraintDecision::Positive + } else { + ConstraintDecision::Negative + } + } else { + ConstraintDecision::Neutral + } + } + Constraint::Use { constraint_use } => { + if let Some(use_) = constrainable.use_() { + if use_ == *constraint_use { + ConstraintDecision::Positive + } else { + ConstraintDecision::Negative + } + } else { + ConstraintDecision::Neutral + } + } + Constraint::Kty { constraint_kty } => { + if **constraint_kty == constrainable.kty() { + ConstraintDecision::Positive + } else { + ConstraintDecision::Negative + } + } + } + } +} + +#[derive(Default)] +pub struct ConstraintSet<'a> { + constraints: HashSet>, +} + +impl<'a> FromIterator> for ConstraintSet<'a> { + fn from_iter>>(iter: 
T) -> Self { + Self { + constraints: HashSet::from_iter(iter), + } + } +} + +#[allow(dead_code)] +impl<'a> ConstraintSet<'a> { + pub fn new(constraints: impl IntoIterator>) -> Self { + constraints.into_iter().collect() + } + + pub fn filter<'b, T: Constrainable, I: IntoIterator>( + &self, + constrainables: I, + ) -> Vec<&'b T> { + let mut selected = Vec::new(); + + 'outer: for constrainable in constrainables { + let mut score = 0; + + for constraint in &self.constraints { + match constraint.decide(constrainable) { + ConstraintDecision::Positive => score += 1, + ConstraintDecision::Neutral => {} + // If any constraint was negative, don't add it to the candidates + ConstraintDecision::Negative => continue 'outer, + } + } + + selected.push((score, constrainable)); + } + + selected.sort_by_key(|(score, _)| *score); + + selected + .into_iter() + .map(|(_score, constrainable)| constrainable) + .collect() + } + + #[must_use] + pub fn alg(mut self, constraint_alg: &'a JsonWebSignatureAlg) -> Self { + self.constraints.insert(Constraint::alg(constraint_alg)); + self + } + + #[must_use] + pub fn algs(mut self, constraint_algs: &'a [JsonWebSignatureAlg]) -> Self { + self.constraints.insert(Constraint::algs(constraint_algs)); + self + } + + #[must_use] + pub fn kid(mut self, constraint_kid: &'a str) -> Self { + self.constraints.insert(Constraint::kid(constraint_kid)); + self + } + + #[must_use] + pub fn use_(mut self, constraint_use: &'a JsonWebKeyUse) -> Self { + self.constraints.insert(Constraint::use_(constraint_use)); + self + } + + #[must_use] + pub fn kty(mut self, constraint_kty: &'a JsonWebKeyType) -> Self { + self.constraints.insert(Constraint::kty(constraint_kty)); + self + } +} + +impl<'a> From<&'a JsonWebSignatureHeader> for ConstraintSet<'a> { + fn from(header: &'a JsonWebSignatureHeader) -> Self { + let mut constraints = Self::default().alg(header.alg()); + + if let Some(kid) = header.kid() { + constraints = constraints.kid(kid); + } + + constraints + } +} diff 
--git a/matrix-authentication-service/crates/jose/src/jwa/asymmetric.rs b/matrix-authentication-service/crates/jose/src/jwa/asymmetric.rs new file mode 100644 index 00000000..b490d579 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwa/asymmetric.rs @@ -0,0 +1,508 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use digest::Digest; +use mas_iana::jose::{JsonWebKeyEcEllipticCurve, JsonWebSignatureAlg}; +use sha2::{Sha256, Sha384, Sha512}; +use signature::rand_core::CryptoRngCore; +use thiserror::Error; + +use super::signature::Signature; +use crate::jwk::{JsonWebKeyPrivateParameters, JsonWebKeyPublicParameters}; + +#[derive(Debug, Error)] +pub enum AsymmetricKeyFromJwkError { + #[error("Invalid RSA parameters")] + Rsa { + #[from] + inner: rsa::errors::Error, + }, + + #[error("Invalid Elliptic Curve parameters")] + EllipticCurve { + #[from] + inner: elliptic_curve::Error, + }, + + #[error("Unsupported algorithm {alg}")] + UnsupportedAlgorithm { alg: JsonWebSignatureAlg }, + + #[error("Key not suitable for algorithm {alg}")] + KeyNotSuitable { alg: JsonWebSignatureAlg }, +} + +/// An enum of all supported asymmetric signature algorithms verifying keys +#[non_exhaustive] +pub enum AsymmetricSigningKey { + Rs256(super::Rs256SigningKey), + Rs384(super::Rs384SigningKey), + Rs512(super::Rs512SigningKey), + Ps256(super::Ps256SigningKey), + Ps384(super::Ps384SigningKey), + Ps512(super::Ps512SigningKey), + Es256(super::Es256SigningKey), + Es384(super::Es384SigningKey), + Es256K(super::Es256KSigningKey), +} + +impl AsymmetricSigningKey { + /// Create a new signing key with the RS256 algorithm from the given RSA + /// private key. 
+ #[must_use] + pub fn rs256(key: rsa::RsaPrivateKey) -> Self { + Self::Rs256(rsa::pkcs1v15::SigningKey::new(key)) + } + + /// Create a new signing key with the RS384 algorithm from the given RSA + /// private key. + #[must_use] + pub fn rs384(key: rsa::RsaPrivateKey) -> Self { + Self::Rs384(rsa::pkcs1v15::SigningKey::new(key)) + } + + /// Create a new signing key with the RS512 algorithm from the given RSA + /// private key. + #[must_use] + pub fn rs512(key: rsa::RsaPrivateKey) -> Self { + Self::Rs512(rsa::pkcs1v15::SigningKey::new(key)) + } + + /// Create a new signing key with the PS256 algorithm from the given RSA + /// private key. + #[must_use] + pub fn ps256(key: rsa::RsaPrivateKey) -> Self { + Self::Ps256(rsa::pss::SigningKey::new_with_salt_len( + key, + Sha256::output_size(), + )) + } + + /// Create a new signing key with the PS384 algorithm from the given RSA + /// private key. + #[must_use] + pub fn ps384(key: rsa::RsaPrivateKey) -> Self { + Self::Ps384(rsa::pss::SigningKey::new_with_salt_len( + key, + Sha384::output_size(), + )) + } + + /// Create a new signing key with the PS512 algorithm from the given RSA + /// private key. + #[must_use] + pub fn ps512(key: rsa::RsaPrivateKey) -> Self { + Self::Ps512(rsa::pss::SigningKey::new_with_salt_len( + key, + Sha512::output_size(), + )) + } + + /// Create a new signing key with the ES256 algorithm from the given ECDSA + /// private key. + #[must_use] + pub fn es256(key: elliptic_curve::SecretKey) -> Self { + Self::Es256(ecdsa::SigningKey::from(key)) + } + + /// Create a new signing key with the ES384 algorithm from the given ECDSA + /// private key. + #[must_use] + pub fn es384(key: elliptic_curve::SecretKey) -> Self { + Self::Es384(ecdsa::SigningKey::from(key)) + } + + /// Create a new signing key with the ES256K algorithm from the given ECDSA + /// private key. 
+ #[must_use] + pub fn es256k(key: elliptic_curve::SecretKey) -> Self { + Self::Es256K(ecdsa::SigningKey::from(key)) + } + + /// Create a new signing key for the given algorithm from the given private + /// JWK parameters. + /// + /// # Errors + /// + /// Returns an error if the key parameters are not suitable for the given + /// algorithm. + pub fn from_jwk_and_alg( + params: &JsonWebKeyPrivateParameters, + alg: &JsonWebSignatureAlg, + ) -> Result { + match (params, alg) { + (JsonWebKeyPrivateParameters::Rsa(params), alg) => match alg { + JsonWebSignatureAlg::Rs256 => Ok(Self::rs256(params.try_into()?)), + JsonWebSignatureAlg::Rs384 => Ok(Self::rs384(params.try_into()?)), + JsonWebSignatureAlg::Rs512 => Ok(Self::rs512(params.try_into()?)), + JsonWebSignatureAlg::Ps256 => Ok(Self::ps256(params.try_into()?)), + JsonWebSignatureAlg::Ps384 => Ok(Self::ps384(params.try_into()?)), + JsonWebSignatureAlg::Ps512 => Ok(Self::ps512(params.try_into()?)), + _ => Err(AsymmetricKeyFromJwkError::KeyNotSuitable { alg: alg.clone() }), + }, + + (JsonWebKeyPrivateParameters::Ec(params), JsonWebSignatureAlg::Es256) + if params.crv == JsonWebKeyEcEllipticCurve::P256 => + { + Ok(Self::es256(params.try_into()?)) + } + + (JsonWebKeyPrivateParameters::Ec(params), JsonWebSignatureAlg::Es384) + if params.crv == JsonWebKeyEcEllipticCurve::P384 => + { + Ok(Self::es384(params.try_into()?)) + } + + (JsonWebKeyPrivateParameters::Ec(params), JsonWebSignatureAlg::Es512) + if params.crv == JsonWebKeyEcEllipticCurve::P521 => + { + Err(AsymmetricKeyFromJwkError::UnsupportedAlgorithm { alg: alg.clone() }) + } + + (JsonWebKeyPrivateParameters::Ec(params), JsonWebSignatureAlg::Es256K) + if params.crv == JsonWebKeyEcEllipticCurve::Secp256K1 => + { + Ok(Self::es256k(params.try_into()?)) + } + + (JsonWebKeyPrivateParameters::Okp(_params), _) => { + Err(AsymmetricKeyFromJwkError::UnsupportedAlgorithm { alg: alg.clone() }) + } + + _ => Err(AsymmetricKeyFromJwkError::KeyNotSuitable { alg: alg.clone() }), + } + 
} +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Rs256SigningKey) -> Self { + Self::Rs256(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Rs384SigningKey) -> Self { + Self::Rs384(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Rs512SigningKey) -> Self { + Self::Rs512(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Ps256SigningKey) -> Self { + Self::Ps256(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Ps384SigningKey) -> Self { + Self::Ps384(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Ps512SigningKey) -> Self { + Self::Ps512(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Es256SigningKey) -> Self { + Self::Es256(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Es384SigningKey) -> Self { + Self::Es384(key) + } +} + +impl From for AsymmetricSigningKey { + fn from(key: super::Es256KSigningKey) -> Self { + Self::Es256K(key) + } +} + +impl signature::RandomizedSigner for AsymmetricSigningKey { + fn try_sign_with_rng( + &self, + rng: &mut impl CryptoRngCore, + msg: &[u8], + ) -> Result { + match self { + Self::Rs256(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Rs384(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Rs512(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Ps256(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Ps384(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Ps512(key) => { + let signature = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Es256(key) => { + let signature: 
ecdsa::Signature<_> = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Es384(key) => { + let signature: ecdsa::Signature<_> = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Es256K(key) => { + let signature: ecdsa::Signature<_> = key.try_sign_with_rng(rng, msg)?; + Ok(Signature::from_signature(&signature)) + } + } + } +} + +/// An enum of all supported asymmetric signature algorithms signing keys +#[non_exhaustive] +pub enum AsymmetricVerifyingKey { + Rs256(super::Rs256VerifyingKey), + Rs384(super::Rs384VerifyingKey), + Rs512(super::Rs512VerifyingKey), + Ps256(super::Ps256VerifyingKey), + Ps384(super::Ps384VerifyingKey), + Ps512(super::Ps512VerifyingKey), + Es256(super::Es256VerifyingKey), + Es384(super::Es384VerifyingKey), + Es256K(super::Es256KVerifyingKey), +} + +impl AsymmetricVerifyingKey { + /// Create a new verifying key with the RS256 algorithm from the given RSA + /// public key. + #[must_use] + pub fn rs256(key: rsa::RsaPublicKey) -> Self { + Self::Rs256(rsa::pkcs1v15::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the RS384 algorithm from the given RSA + /// public key. + #[must_use] + pub fn rs384(key: rsa::RsaPublicKey) -> Self { + Self::Rs384(rsa::pkcs1v15::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the RS512 algorithm from the given RSA + /// public key. + #[must_use] + pub fn rs512(key: rsa::RsaPublicKey) -> Self { + Self::Rs512(rsa::pkcs1v15::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the PS256 algorithm from the given RSA + /// public key. + #[must_use] + pub fn ps256(key: rsa::RsaPublicKey) -> Self { + Self::Ps256(rsa::pss::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the PS384 algorithm from the given RSA + /// public key. 
+ #[must_use] + pub fn ps384(key: rsa::RsaPublicKey) -> Self { + Self::Ps384(rsa::pss::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the PS512 algorithm from the given RSA + /// public key. + #[must_use] + pub fn ps512(key: rsa::RsaPublicKey) -> Self { + Self::Ps512(rsa::pss::VerifyingKey::new(key)) + } + + /// Create a new verifying key with the ES256 algorithm from the given ECDSA + /// public key. + #[must_use] + pub fn es256(key: elliptic_curve::PublicKey) -> Self { + Self::Es256(ecdsa::VerifyingKey::from(key)) + } + + /// Create a new verifying key with the ES384 algorithm from the given ECDSA + /// public key. + #[must_use] + pub fn es384(key: elliptic_curve::PublicKey) -> Self { + Self::Es384(ecdsa::VerifyingKey::from(key)) + } + + /// Create a new verifying key with the ES256K algorithm from the given + /// ECDSA public key. + #[must_use] + pub fn es256k(key: elliptic_curve::PublicKey) -> Self { + Self::Es256K(ecdsa::VerifyingKey::from(key)) + } + + /// Create a new verifying key for the given algorithm from the given public + /// JWK parameters. + /// + /// # Errors + /// + /// Returns an error if the key parameters are not suitable for the given + /// algorithm. 
+ pub fn from_jwk_and_alg( + params: &JsonWebKeyPublicParameters, + alg: &JsonWebSignatureAlg, + ) -> Result { + match (params, alg) { + (JsonWebKeyPublicParameters::Rsa(params), alg) => match alg { + JsonWebSignatureAlg::Rs256 => Ok(Self::rs256(params.try_into()?)), + JsonWebSignatureAlg::Rs384 => Ok(Self::rs384(params.try_into()?)), + JsonWebSignatureAlg::Rs512 => Ok(Self::rs512(params.try_into()?)), + JsonWebSignatureAlg::Ps256 => Ok(Self::ps256(params.try_into()?)), + JsonWebSignatureAlg::Ps384 => Ok(Self::ps384(params.try_into()?)), + JsonWebSignatureAlg::Ps512 => Ok(Self::ps512(params.try_into()?)), + _ => Err(AsymmetricKeyFromJwkError::KeyNotSuitable { alg: alg.clone() }), + }, + + (JsonWebKeyPublicParameters::Ec(params), JsonWebSignatureAlg::Es256) + if params.crv == JsonWebKeyEcEllipticCurve::P256 => + { + Ok(Self::es256(params.try_into()?)) + } + + (JsonWebKeyPublicParameters::Ec(params), JsonWebSignatureAlg::Es384) + if params.crv == JsonWebKeyEcEllipticCurve::P384 => + { + Ok(Self::es384(params.try_into()?)) + } + + (JsonWebKeyPublicParameters::Ec(params), JsonWebSignatureAlg::Es512) + if params.crv == JsonWebKeyEcEllipticCurve::P521 => + { + Err(AsymmetricKeyFromJwkError::UnsupportedAlgorithm { alg: alg.clone() }) + } + + (JsonWebKeyPublicParameters::Ec(params), JsonWebSignatureAlg::Es256K) + if params.crv == JsonWebKeyEcEllipticCurve::Secp256K1 => + { + Ok(Self::es256k(params.try_into()?)) + } + + (JsonWebKeyPublicParameters::Okp(_params), _) => { + Err(AsymmetricKeyFromJwkError::UnsupportedAlgorithm { alg: alg.clone() }) + } + + _ => Err(AsymmetricKeyFromJwkError::KeyNotSuitable { alg: alg.clone() }), + } + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Rs256VerifyingKey) -> Self { + Self::Rs256(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Rs384VerifyingKey) -> Self { + Self::Rs384(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Rs512VerifyingKey) -> Self { + 
Self::Rs512(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Ps256VerifyingKey) -> Self { + Self::Ps256(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Ps384VerifyingKey) -> Self { + Self::Ps384(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Ps512VerifyingKey) -> Self { + Self::Ps512(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Es256VerifyingKey) -> Self { + Self::Es256(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Es384VerifyingKey) -> Self { + Self::Es384(key) + } +} + +impl From for AsymmetricVerifyingKey { + fn from(key: super::Es256KVerifyingKey) -> Self { + Self::Es256K(key) + } +} + +impl signature::Verifier for AsymmetricVerifyingKey { + fn verify(&self, msg: &[u8], signature: &Signature) -> Result<(), ecdsa::Error> { + match self { + Self::Rs256(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Rs384(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Rs512(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Ps256(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Ps384(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Ps512(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Es256(key) => { + let signature: ecdsa::Signature<_> = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Es384(key) => { + let signature: ecdsa::Signature<_> = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Es256K(key) => { + let signature: ecdsa::Signature<_> = signature.to_signature()?; + key.verify(msg, &signature) + } + } + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwa/hmac.rs 
b/matrix-authentication-service/crates/jose/src/jwa/hmac.rs new file mode 100644 index 00000000..ba24ee96 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwa/hmac.rs @@ -0,0 +1,129 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::marker::PhantomData; + +use digest::{ + Digest, Mac, OutputSizeUser, + crypto_common::BlockSizeUser, + generic_array::{ArrayLength, GenericArray}, +}; +use signature::{Signer, Verifier}; +use thiserror::Error; + +pub struct Signature> { + signature: GenericArray, +} + +impl> PartialEq for Signature { + fn eq(&self, other: &Self) -> bool { + self.signature == other.signature + } +} + +impl> Eq for Signature {} + +impl> std::fmt::Debug for Signature { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.signature) + } +} + +impl> Clone for Signature { + fn clone(&self) -> Self { + Self { + signature: self.signature.clone(), + } + } +} + +impl> From> for GenericArray { + fn from(val: Signature) -> Self { + val.signature + } +} + +impl<'a, S: ArrayLength> TryFrom<&'a [u8]> for Signature { + type Error = InvalidLength; + + fn try_from(value: &'a [u8]) -> Result { + if value.len() != S::to_usize() { + return Err(InvalidLength); + } + let mut signature = GenericArray::default(); + signature.copy_from_slice(value); + Ok(Self { signature }) + } +} + +impl> signature::SignatureEncoding for Signature { + type Repr = GenericArray; +} + +impl> AsRef<[u8]> for Signature { + fn as_ref(&self) -> &[u8] { + self.signature.as_ref() + } +} + +pub struct Hmac { + key: Vec, + digest: PhantomData, +} + +impl Hmac { + pub const fn new(key: Vec) -> Self { + Self { + key, + digest: PhantomData, + } + } +} + +#[derive(Error, Debug)] +#[error("invalid length")] +pub struct InvalidLength; + +impl 
From> for Hmac { + fn from(key: Vec) -> Self { + Self { + key, + digest: PhantomData, + } + } +} + +impl + Signer as OutputSizeUser>::OutputSize>> for Hmac +{ + fn try_sign( + &self, + msg: &[u8], + ) -> Result as OutputSizeUser>::OutputSize>, signature::Error> + { + let mut mac = as Mac>::new_from_slice(&self.key) + .map_err(signature::Error::from_source)?; + mac.update(msg); + let signature = mac.finalize().into_bytes(); + Ok(Signature { signature }) + } +} + +impl + Verifier as OutputSizeUser>::OutputSize>> for Hmac +{ + fn verify( + &self, + msg: &[u8], + signature: &Signature< as OutputSizeUser>::OutputSize>, + ) -> Result<(), signature::Error> { + let new_signature = self.try_sign(msg)?; + if &new_signature != signature { + return Err(signature::Error::new()); + } + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwa/mod.rs b/matrix-authentication-service/crates/jose/src/jwa/mod.rs new file mode 100644 index 00000000..f0130857 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwa/mod.rs @@ -0,0 +1,59 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use mas_iana::jose::JsonWebSignatureAlg; +use sha2::{Sha256, Sha384, Sha512}; + +mod asymmetric; +pub(crate) mod hmac; +mod signature; +mod symmetric; + +pub use self::{ + asymmetric::{AsymmetricKeyFromJwkError, AsymmetricSigningKey, AsymmetricVerifyingKey}, + symmetric::{InvalidAlgorithm, SymmetricKey}, +}; + +pub type Hs256Key = self::hmac::Hmac; +pub type Hs384Key = self::hmac::Hmac; +pub type Hs512Key = self::hmac::Hmac; + +pub type Rs256SigningKey = rsa::pkcs1v15::SigningKey; +pub type Rs256VerifyingKey = rsa::pkcs1v15::VerifyingKey; +pub type Rs384SigningKey = rsa::pkcs1v15::SigningKey; +pub type Rs384VerifyingKey = rsa::pkcs1v15::VerifyingKey; +pub type Rs512SigningKey = rsa::pkcs1v15::SigningKey; +pub type Rs512VerifyingKey = rsa::pkcs1v15::VerifyingKey; + +pub type Ps256SigningKey = rsa::pss::SigningKey; +pub type Ps256VerifyingKey = rsa::pss::VerifyingKey; +pub type Ps384SigningKey = rsa::pss::SigningKey; +pub type Ps384VerifyingKey = rsa::pss::VerifyingKey; +pub type Ps512SigningKey = rsa::pss::SigningKey; +pub type Ps512VerifyingKey = rsa::pss::VerifyingKey; + +pub type Es256SigningKey = ecdsa::SigningKey; +pub type Es256VerifyingKey = ecdsa::VerifyingKey; +pub type Es384SigningKey = ecdsa::SigningKey; +pub type Es384VerifyingKey = ecdsa::VerifyingKey; +pub type Es256KSigningKey = ecdsa::SigningKey; +pub type Es256KVerifyingKey = ecdsa::VerifyingKey; + +/// All the signing algorithms supported by this crate. 
+pub const SUPPORTED_SIGNING_ALGORITHMS: [JsonWebSignatureAlg; 12] = [ + JsonWebSignatureAlg::Hs256, + JsonWebSignatureAlg::Hs384, + JsonWebSignatureAlg::Hs512, + JsonWebSignatureAlg::Rs256, + JsonWebSignatureAlg::Rs384, + JsonWebSignatureAlg::Rs512, + JsonWebSignatureAlg::Ps256, + JsonWebSignatureAlg::Ps384, + JsonWebSignatureAlg::Ps512, + JsonWebSignatureAlg::Es256, + JsonWebSignatureAlg::Es384, + JsonWebSignatureAlg::Es256K, +]; diff --git a/matrix-authentication-service/crates/jose/src/jwa/signature.rs b/matrix-authentication-service/crates/jose/src/jwa/signature.rs new file mode 100644 index 00000000..35c3cb97 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwa/signature.rs @@ -0,0 +1,54 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use signature::SignatureEncoding as _; + +#[derive(Debug, Clone)] +pub struct Signature { + bytes: Box<[u8]>, +} + +impl From for Box<[u8]> { + fn from(val: Signature) -> Self { + val.bytes + } +} + +impl<'a> From<&'a [u8]> for Signature { + fn from(value: &'a [u8]) -> Self { + Self { + bytes: value.into(), + } + } +} + +impl signature::SignatureEncoding for Signature { + type Repr = Box<[u8]>; +} + +impl Signature { + pub fn new(bytes: Vec) -> Self { + Self { + bytes: bytes.into(), + } + } + + pub fn from_signature(signature: &S) -> Self + where + S: signature::SignatureEncoding, + { + Self { + bytes: signature.to_vec().into(), + } + } + + pub fn to_signature(&self) -> Result + where + S: signature::SignatureEncoding, + { + S::try_from(&self.to_bytes()).map_err(|_| signature::Error::default()) + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwa/symmetric.rs b/matrix-authentication-service/crates/jose/src/jwa/symmetric.rs new file mode 100644 index 00000000..3e45b6ab --- /dev/null +++ 
b/matrix-authentication-service/crates/jose/src/jwa/symmetric.rs @@ -0,0 +1,129 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_iana::jose::JsonWebSignatureAlg; +use thiserror::Error; + +use super::signature::Signature; + +// An enum of all supported symmetric signing algorithms keys +#[non_exhaustive] +pub enum SymmetricKey { + Hs256(super::Hs256Key), + Hs384(super::Hs384Key), + Hs512(super::Hs512Key), +} + +#[derive(Debug, Error)] +#[error("Invalid algorithm {alg} used for symetric key")] +pub struct InvalidAlgorithm { + pub alg: JsonWebSignatureAlg, + pub key: Vec, +} + +impl SymmetricKey { + /// Create a new symmetric key for the given algorithm with the given key. + /// + /// # Errors + /// + /// Returns an error if the algorithm is not supported. + pub fn new_for_alg(key: Vec, alg: &JsonWebSignatureAlg) -> Result { + match alg { + JsonWebSignatureAlg::Hs256 => Ok(Self::hs256(key)), + JsonWebSignatureAlg::Hs384 => Ok(Self::hs384(key)), + JsonWebSignatureAlg::Hs512 => Ok(Self::hs512(key)), + _ => Err(InvalidAlgorithm { + alg: alg.clone(), + key, + }), + } + } + + /// Create a new symmetric key using the HS256 algorithm with the given key. + #[must_use] + pub const fn hs256(key: Vec) -> Self { + Self::Hs256(super::Hs256Key::new(key)) + } + + /// Create a new symmetric key using the HS384 algorithm with the given key. + #[must_use] + pub const fn hs384(key: Vec) -> Self { + Self::Hs384(super::Hs384Key::new(key)) + } + + /// Create a new symmetric key using the HS512 algorithm with the given key. 
+ #[must_use] + pub const fn hs512(key: Vec) -> Self { + Self::Hs512(super::Hs512Key::new(key)) + } +} + +impl From for SymmetricKey { + fn from(key: super::Hs256Key) -> Self { + Self::Hs256(key) + } +} + +impl From for SymmetricKey { + fn from(key: super::Hs384Key) -> Self { + Self::Hs384(key) + } +} + +impl From for SymmetricKey { + fn from(key: super::Hs512Key) -> Self { + Self::Hs512(key) + } +} + +impl signature::RandomizedSigner for SymmetricKey { + fn try_sign_with_rng( + &self, + _rng: &mut (impl rand::CryptoRng + rand::RngCore), + msg: &[u8], + ) -> Result { + // XXX: is that implementation alright? + signature::Signer::try_sign(self, msg) + } +} + +impl signature::Signer for SymmetricKey { + fn try_sign(&self, msg: &[u8]) -> Result { + match self { + Self::Hs256(key) => { + let signature = key.try_sign(msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Hs384(key) => { + let signature = key.try_sign(msg)?; + Ok(Signature::from_signature(&signature)) + } + Self::Hs512(key) => { + let signature = key.try_sign(msg)?; + Ok(Signature::from_signature(&signature)) + } + } + } +} + +impl signature::Verifier for SymmetricKey { + fn verify(&self, msg: &[u8], signature: &Signature) -> Result<(), signature::Error> { + match self { + Self::Hs256(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Hs384(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + Self::Hs512(key) => { + let signature = signature.to_signature()?; + key.verify(msg, &signature) + } + } + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwk/mod.rs b/matrix-authentication-service/crates/jose/src/jwk/mod.rs new file mode 100644 index 00000000..f794620b --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwk/mod.rs @@ -0,0 +1,573 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Ref: + +use mas_iana::jose::{ + JsonWebKeyEcEllipticCurve, JsonWebKeyOperation, JsonWebKeyType, JsonWebKeyUse, + JsonWebSignatureAlg, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +use sha2::{Digest, Sha256}; +use url::Url; + +use crate::{ + base64::{Base64, Base64UrlNoPad}, + constraints::{Constrainable, Constraint, ConstraintSet}, +}; + +pub(crate) mod private_parameters; +pub(crate) mod public_parameters; + +use self::private_parameters::SymetricKeyError; +pub use self::{ + private_parameters::JsonWebKeyPrivateParameters, public_parameters::JsonWebKeyPublicParameters, +}; + +pub trait ParametersInfo { + fn kty(&self) -> JsonWebKeyType; + fn possible_algs(&self) -> &[JsonWebSignatureAlg]; +} + +/// An utilitary trait to figure out the [`JsonWebKeyEcEllipticCurve`] value for +/// elliptic curves +trait JwkEcCurve { + const CRV: JsonWebKeyEcEllipticCurve; +} + +impl JwkEcCurve for p256::NistP256 { + const CRV: JsonWebKeyEcEllipticCurve = JsonWebKeyEcEllipticCurve::P256; +} + +impl JwkEcCurve for p384::NistP384 { + const CRV: JsonWebKeyEcEllipticCurve = JsonWebKeyEcEllipticCurve::P384; +} + +impl JwkEcCurve for k256::Secp256k1 { + const CRV: JsonWebKeyEcEllipticCurve = JsonWebKeyEcEllipticCurve::Secp256K1; +} + +#[skip_serializing_none] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct JsonWebKey

{ + #[serde(flatten)] + parameters: P, + + #[serde(default)] + r#use: Option, + + #[serde(default)] + key_ops: Option>, + + #[serde(default)] + alg: Option, + + #[serde(default)] + kid: Option, + + #[schemars(with = "Option")] + #[serde(default)] + x5u: Option, + + #[schemars(with = "Vec")] + #[serde(default)] + x5c: Option>, + + #[schemars(with = "Option")] + #[serde(default)] + x5t: Option, + + #[schemars(with = "Option")] + #[serde(default, rename = "x5t#S256")] + x5t_s256: Option, +} + +pub type PublicJsonWebKey = JsonWebKey; +pub type PrivateJsonWebKey = JsonWebKey; + +impl TryFrom for PublicJsonWebKey { + type Error = SymetricKeyError; + + fn try_from(value: PrivateJsonWebKey) -> Result { + value.try_map(JsonWebKeyPublicParameters::try_from) + } +} + +impl

JsonWebKey

{ + /// Create a new [`JsonWebKey`] with the given parameters. + #[must_use] + pub const fn new(parameters: P) -> Self { + Self { + parameters, + r#use: None, + key_ops: None, + alg: None, + kid: None, + x5u: None, + x5c: None, + x5t: None, + x5t_s256: None, + } + } + + /// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible + /// mapper, consuming the original key. + /// + /// # Errors + /// + /// Returns an error if the mapper returns an error. + pub fn try_map(self, mapper: M) -> Result, E> + where + M: FnOnce(P) -> Result, + { + Ok(JsonWebKey { + parameters: mapper(self.parameters)?, + r#use: self.r#use, + key_ops: self.key_ops, + alg: self.alg, + kid: self.kid, + x5u: self.x5u, + x5c: self.x5c, + x5t: self.x5t, + x5t_s256: self.x5t_s256, + }) + } + + /// Map the parameters of this [`JsonWebKey`] to a new type, consuming the + /// original key. + pub fn map(self, mapper: M) -> JsonWebKey + where + M: FnOnce(P) -> O, + { + JsonWebKey { + parameters: mapper(self.parameters), + r#use: self.r#use, + key_ops: self.key_ops, + alg: self.alg, + kid: self.kid, + x5u: self.x5u, + x5c: self.x5c, + x5t: self.x5t, + x5t_s256: self.x5t_s256, + } + } + + /// Map the parameters of this [`JsonWebKey`] to a new type, with a fallible + /// mapper, cloning the other fields. + /// + /// # Errors + /// + /// Returns an error if the mapper returns an error. + pub fn try_cloned_map(&self, mapper: M) -> Result, E> + where + M: FnOnce(&P) -> Result, + { + Ok(JsonWebKey { + parameters: mapper(&self.parameters)?, + r#use: self.r#use.clone(), + key_ops: self.key_ops.clone(), + alg: self.alg.clone(), + kid: self.kid.clone(), + x5u: self.x5u.clone(), + x5c: self.x5c.clone(), + x5t: self.x5t.clone(), + x5t_s256: self.x5t_s256.clone(), + }) + } + + /// Map the parameters of this [`JsonWebKey`] to a new type, cloning the + /// other fields. 
+ pub fn cloned_map(&self, mapper: M) -> JsonWebKey + where + M: FnOnce(&P) -> O, + { + JsonWebKey { + parameters: mapper(&self.parameters), + r#use: self.r#use.clone(), + key_ops: self.key_ops.clone(), + alg: self.alg.clone(), + kid: self.kid.clone(), + x5u: self.x5u.clone(), + x5c: self.x5c.clone(), + x5t: self.x5t.clone(), + x5t_s256: self.x5t_s256.clone(), + } + } + + /// Set the `use` field of this [`JsonWebKey`]. + #[must_use] + pub fn with_use(mut self, value: JsonWebKeyUse) -> Self { + self.r#use = Some(value); + self + } + + /// Set the `key_ops` field of this [`JsonWebKey`]. + #[must_use] + pub fn with_key_ops(mut self, key_ops: Vec) -> Self { + self.key_ops = Some(key_ops); + self + } + + /// Set the `alg` field of this [`JsonWebKey`]. + #[must_use] + pub fn with_alg(mut self, alg: JsonWebSignatureAlg) -> Self { + self.alg = Some(alg); + self + } + + /// Set the `kid` field of this [`JsonWebKey`]. + #[must_use] + pub fn with_kid(mut self, kid: impl Into) -> Self { + self.kid = Some(kid.into()); + self + } + + /// Get the `kid` field of this [`JsonWebKey`], if set. + #[must_use] + pub const fn alg(&self) -> Option<&JsonWebSignatureAlg> { + self.alg.as_ref() + } + + /// Get the inner parameters of this [`JsonWebKey`]. + #[must_use] + pub const fn params(&self) -> &P { + &self.parameters + } +} + +/// Methods to calculate RFC 7638 JWK Thumbprints. +pub trait Thumbprint { + /// Returns the RFC 7638 JWK Thumbprint JSON string. + fn thumbprint_prehashed(&self) -> String; + + /// Returns the RFC 7638 SHA256-hashed JWK Thumbprint. + fn thumbprint_sha256(&self) -> [u8; 32] { + Sha256::digest(self.thumbprint_prehashed()).into() + } + + /// Returns the RFC 7638 SHA256-hashed JWK Thumbprint as base64url string. + fn thumbprint_sha256_base64(&self) -> String { + Base64UrlNoPad::new(self.thumbprint_sha256().into()).encode() + } +} + +impl Thumbprint for JsonWebKey

{ + fn thumbprint_prehashed(&self) -> String { + self.parameters.thumbprint_prehashed() + } +} + +impl

Constrainable for JsonWebKey

+where + P: ParametersInfo, +{ + fn kid(&self) -> Option<&str> { + self.kid.as_deref() + } + + fn kty(&self) -> JsonWebKeyType { + self.parameters.kty() + } + + fn algs(&self) -> &[JsonWebSignatureAlg] { + self.parameters.possible_algs() + } + + fn alg(&self) -> Option<&JsonWebSignatureAlg> { + self.alg.as_ref() + } + + fn use_(&self) -> Option<&JsonWebKeyUse> { + self.r#use.as_ref() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct JsonWebKeySet

{ + keys: Vec>, +} + +impl

Default for JsonWebKeySet

{ + fn default() -> Self { + Self { + keys: Vec::default(), + } + } +} + +pub type PublicJsonWebKeySet = JsonWebKeySet; +pub type PrivateJsonWebKeySet = + JsonWebKeySet; + +impl From for PublicJsonWebKeySet { + fn from(value: PrivateJsonWebKeySet) -> Self { + let keys = value + .keys + .into_iter() + .filter_map(|key: PrivateJsonWebKey| key.try_into().ok()) + .collect(); + Self { keys } + } +} + +impl

std::ops::Deref for JsonWebKeySet

{ + type Target = Vec>; + + fn deref(&self) -> &Self::Target { + &self.keys + } +} + +impl

JsonWebKeySet

{ + #[must_use] + pub fn new(keys: Vec>) -> Self { + Self { keys } + } + + /// Find the best key given the constraints + #[must_use] + pub fn find_key(&self, constraints: &ConstraintSet) -> Option<&JsonWebKey

> + where + P: ParametersInfo, + { + constraints.filter(&self.keys).pop() + } + + /// Find the list of keys which match the given constraints + #[must_use] + pub fn find_keys(&self, constraints: &ConstraintSet) -> Vec<&JsonWebKey

> + where + P: ParametersInfo, + { + constraints.filter(&self.keys) + } + + /// Find a key for the given algorithm. Returns `None` if no suitable key + /// was found. + #[must_use] + pub fn signing_key_for_algorithm(&self, alg: &JsonWebSignatureAlg) -> Option<&JsonWebKey

> + where + P: ParametersInfo, + { + let constraints = ConstraintSet::new([ + Constraint::alg(alg), + Constraint::use_(&mas_iana::jose::JsonWebKeyUse::Sig), + ]); + self.find_key(&constraints) + } + + /// Get a list of available signing algorithms for this [`JsonWebKeySet`] + #[must_use] + pub fn available_signing_algorithms(&self) -> Vec + where + P: ParametersInfo, + { + let mut algs: Vec<_> = self + .keys + .iter() + .flat_map(|key| key.params().possible_algs()) + .cloned() + .collect(); + algs.sort(); + algs.dedup(); + algs + } +} + +impl

FromIterator> for JsonWebKeySet

{ + fn from_iter>>(iter: T) -> Self { + let keys = iter.into_iter().collect(); + Self { keys } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::constraints::ConstraintSet; + + #[test] + fn load_google_keys() { + let jwks = serde_json::json!({ + "keys": [ + { + "alg": "RS256", + "kty": "RSA", + "n": "tCwhHOxX_ylh5kVwfVqW7QIBTIsPjkjCjVCppDrynuF_3msEdtEaG64eJUz84ODFNMCC0BQ57G7wrKQVWkdSDxWUEqGk2BixBiHJRWZdofz1WOBTdPVicvHW5Zl_aIt7uXWMdOp_SODw-O2y2f05EqbFWFnR2-1y9K8KbiOp82CD72ny1Jbb_3PxTs2Z0F4ECAtTzpDteaJtjeeueRjr7040JAjQ-5fpL5D1g8x14LJyVIo-FL_y94NPFbMp7UCi69CIfVHXFO8WYFz949og-47mWRrID5lS4zpx-QLuvNhUb_lSqmylUdQB3HpRdOcYdj3xwy4MHJuu7tTaf0AmCQ", + "use": "sig", + "kid": "d98f49bc6ca4581eae8dfadd494fce10ea23aab0", + "e": "AQAB" + }, + { + "use": "sig", + "kty": "RSA", + "kid": "03e84aed4ef4431014e8617567864c4efaaaede9", + "n": "ma2uRyBeSEOatGuDpCiV9oIxlDWix_KypDYuhQfEzqi_BiF4fV266OWfyjcABbam59aJMNvOnKW3u_eZM-PhMCBij5MZ-vcBJ4GfxDJeKSn-GP_dJ09rpDcILh8HaWAnPmMoi4DC0nrfE241wPISvZaaZnGHkOrfN_EnA5DligLgVUbrA5rJhQ1aSEQO_gf1raEOW3DZ_ACU3qhtgO0ZBG3a5h7BPiRs2sXqb2UCmBBgwyvYLDebnpE7AotF6_xBIlR-Cykdap3GHVMXhrIpvU195HF30ZoBU4dMd-AeG6HgRt4Cqy1moGoDgMQfbmQ48Hlunv9_Vi2e2CLvYECcBw", + "e": "AQAB", + "alg": "RS256" + } + ] + }); + + let jwks: PublicJsonWebKeySet = serde_json::from_value(jwks).unwrap(); + // Both keys are RSA public keys + for jwk in &jwks.keys { + let p = jwk.params().rsa().expect("an RSA key"); + rsa::RsaPublicKey::try_from(p).unwrap(); + } + + let constraints = ConstraintSet::default() + .use_(&JsonWebKeyUse::Sig) + .kty(&JsonWebKeyType::Rsa) + .alg(&JsonWebSignatureAlg::Rs256); + let candidates = constraints.filter(&jwks.keys); + assert_eq!(candidates.len(), 2); + + let constraints = ConstraintSet::default() + .use_(&JsonWebKeyUse::Sig) + .kty(&JsonWebKeyType::Rsa) + .kid("03e84aed4ef4431014e8617567864c4efaaaede9"); + let candidates = constraints.filter(&jwks.keys); + assert_eq!(candidates.len(), 1); + } + + #[test] + fn load_keycloak_keys() { + let jwks = 
serde_json::json!({ + "keys": [ + { + "kid": "SuGUPE9Sr-1Gha2NLse33r5NQu3XoS_I3Qds3bcmfQE", + "kty": "RSA", + "alg": "RS256", + "use": "sig", + "n": "j21ih2m1RPeTXtIPFas2ZclhW8v2RitLdXJTqOFviWonaSObUWNZUkVvIdDKDyJhU7caGPnz52zXX1Trhbbq1uoCalAuIPw9UgJUJhUhlH7lqaRtYdbOrOzXZ7kVsApe1OdlezgShnyMhW5ChEJXQrCkR_LktBJQ8-6ZBNLHx3ps-pQrpXky_XdYZM_I_f1R8z36gnXagklAMMNKciFRURBMAsPbOgaly-slEDdVcuNtcoccSYdo9kRS5wjQlK6LZ3lniJrLRkUMvN6ZQcMLUWMDpghH5bdbhaaOb28HQWwpRDEBIMIH9Fi9aiKxwHa5YAqW1yetOq_9XXyYiuP9G6hZozSnkkfAOzYFqfr92vIPHddVVUUVLvH8UL4u1o553uVtOExA_pJVRghfO0IPZhJ6rUaZR7krvUMdCYngGznuD_V2-TAL9Nu8YXHIrZSU4WBKIvQC2HDOogSjj5dNDBUuAmOhI2OjuLjiOXpRPlaGcMIIlLALwQ76gFTEhTDlRXar7oLU8wj1KHLkc6d__lwdBkR-2Fr4dAewW4bHVFsPeDSM_vJZpK0XACrNgrrNBax48_hOlK9YfzSopyVCHwewxmC743eNYWEhE9LY-cc3ZGK9tHXgQG2l1tOZ_JK9wo1HsIuu3gdl2SV3ZOs6Ggi812GMfrgijnthC7e4Mv8", + "e": "AQAB", + "x5c": [ + "MIIElTCCAn0CBgF95wE6HzANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNDE3WhcNMzExMjIzMTExNTU3WjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCPbWKHabVE95Ne0g8VqzZlyWFby/ZGK0t1clOo4W+JaidpI5tRY1lSRW8h0MoPImFTtxoY+fPnbNdfVOuFturW6gJqUC4g/D1SAlQmFSGUfuWppG1h1s6s7NdnuRWwCl7U52V7OBKGfIyFbkKEQldCsKRH8uS0ElDz7pkE0sfHemz6lCuleTL9d1hkz8j9/VHzPfqCddqCSUAww0pyIVFREEwCw9s6BqXL6yUQN1Vy421yhxxJh2j2RFLnCNCUrotneWeImstGRQy83plBwwtRYwOmCEflt1uFpo5vbwdBbClEMQEgwgf0WL1qIrHAdrlgCpbXJ606r/1dfJiK4/0bqFmjNKeSR8A7NgWp+v3a8g8d11VVRRUu8fxQvi7Wjnne5W04TED+klVGCF87Qg9mEnqtRplHuSu9Qx0JieAbOe4P9Xb5MAv027xhccitlJThYEoi9ALYcM6iBKOPl00MFS4CY6EjY6O4uOI5elE+VoZwwgiUsAvBDvqAVMSFMOVFdqvugtTzCPUocuRzp3/+XB0GRH7YWvh0B7BbhsdUWw94NIz+8lmkrRcAKs2Cus0FrHjz+E6Ur1h/NKinJUIfB7DGYLvjd41hYSET0tj5xzdkYr20deBAbaXW05n8kr3CjUewi67eB2XZJXdk6zoaCLzXYYx+uCKOe2ELt7gy/wIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQB+mzE9ZA/hX/GAM74ZXs+ZEjV+qzUsGNpHkXyzdRc1ic28Go5ujAIMxwwsJ4PUSmw6MjPpCKV3kSXoyc7kUDZ/NQ7gwanP4DN8wDq7GLGqT3QRzMLfVy+el2Vjwd3Q6BhXNAK/jPzv0DFu1GG4WCpc1PcM8p/zWkbKWf9u4nBl7RBsMddn7KLxV+D2Y2eGshZ81YVaJiKF9y+gpgyxBOOsTFITu8SxBpXSwBIP4jTv7NllicxI8G9mk87
XX3DdA+NHPKsKj35RbDAXyMid8tMl4R3IQ34F3ADuquHpdAdfTNDSm5lwilyWjV35O+8mKA2n/3LAhfCNgxMU0m9Jm8kI/pu9qTXnIx+HMr8IsAMseGxl+dZ/jJjGGPw1VZhHhU78dN+DZlUSKOVjOSQF+8CGuCxMnOx7+leGafs6G6LtsF/vQvJBTB9DRlM3ag0hQRT2ZEXPWSvcz3ARXqWyaHTzhR4F/+rRX1CyBsCdG3b3iicjGp7EPeaqXEki1K3SNwwv1byeJfqP785auswpojpUYfp/J850VAfA4xuVvxK3xuJrvbpS4DR6JQPY0fs6g8JEDahYa6rSB8H9toLC2r92gerqcGFpEU8uHRHxm9QZjIyFh78LWqpfegz0HMjYqaULgZJxqqZH2sVIu+nPuKC7tIjYWtODR0A13Ar3lH8aZg==" + ], + "x5t": "fvgfH2gggONL7t4ZTvOdBpI94kM", + "x5t#S256": "uwHwO2crQ74jak2bmAeAt_4nrqGDQoElaiVvOlSGOOw" + }, + { + "kid": "7pW7bkOM27LQ-KJGHzT1dt3yBmhcj20xj7A-itsuY6U", + "kty": "RSA", + "alg": "RS384", + "use": "sig", + "n": "lI1actdwWsMY8BpY68x8No7fwokLTTcZ8-qpqF9CDwX40X70ql9JPqTpLAHp7H7byfO-8VqZVKYKdzFCLjaEqs6Vx6YYuu4BsM2RIDI2CmClngUE5RMXnaEj8XP-h8Q4FnGcXL47n2UNr9mbZSp85W0TWOLtMczuqwwJ2jcYkDFtvLY0UirioKzN5Vr29WdDiCm9i4jHvHE7W41LFCOFLOLxGOq9wLVRNRMRcC3YS6WlrfiMFkPQIGxzFH2OiW2iR9x8QHmxqrqdfidmFsosgG5_2tbX3Q5PnHjYTNHh-iY4uIQ6bsBj1Enoj5h5kudwtgHDyn9OAiljTqLMXsoK9KEZrjE8zPnxQtvfXLCby2CI69X5JZ2lQJCch4cn1eIxn-jJ9Z0aE9EML1Bfp6w5sKELXt1aRtu5HQ5IQ__y2sBJd91NdiBxAzCK5kZjhRIRtt57J5ZHTLsBeHvr2L7SwZ_FojrQly7mI5PMGthZoGoVAr-bJcInzICpcsLKWdW-C6jxhXwRtnJOuTizEOr33vnLohMlmJUZiomYnKv8MEFAmihK5GAHTJ-4QIUuUeC13Dl5aRJacxvoKfgR_zw9P6HCUb7Nq7uzN3oqUdmDYYng1OFVo-1liYuCLbH6ep5LTmAstQY3IjkIFKeY-tvSPdpC9y1TwaHqEktXckvRGx0", + "e": "AQAB", + "x5c": [ + 
"MIIElTCCAn0CBgF95wGjLjANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNDQzWhcNMzExMjIzMTExNjIzWjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCUjVpy13BawxjwGljrzHw2jt/CiQtNNxnz6qmoX0IPBfjRfvSqX0k+pOksAensftvJ877xWplUpgp3MUIuNoSqzpXHphi67gGwzZEgMjYKYKWeBQTlExedoSPxc/6HxDgWcZxcvjufZQ2v2ZtlKnzlbRNY4u0xzO6rDAnaNxiQMW28tjRSKuKgrM3lWvb1Z0OIKb2LiMe8cTtbjUsUI4Us4vEY6r3AtVE1ExFwLdhLpaWt+IwWQ9AgbHMUfY6JbaJH3HxAebGqup1+J2YWyiyAbn/a1tfdDk+ceNhM0eH6Jji4hDpuwGPUSeiPmHmS53C2AcPKf04CKWNOosxeygr0oRmuMTzM+fFC299csJvLYIjr1fklnaVAkJyHhyfV4jGf6Mn1nRoT0QwvUF+nrDmwoQte3VpG27kdDkhD//LawEl33U12IHEDMIrmRmOFEhG23nsnlkdMuwF4e+vYvtLBn8WiOtCXLuYjk8wa2FmgahUCv5slwifMgKlywspZ1b4LqPGFfBG2ck65OLMQ6vfe+cuiEyWYlRmKiZicq/wwQUCaKErkYAdMn7hAhS5R4LXcOXlpElpzG+gp+BH/PD0/ocJRvs2ru7M3eipR2YNhieDU4VWj7WWJi4Itsfp6nktOYCy1BjciOQgUp5j629I92kL3LVPBoeoSS1dyS9EbHQIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQCBKgIXOSH8cLgKHq1Q5Zn69YdVpC8W8gp3hfjqa9lpER8MHyZVw0isOzdICrNZdgsatq/uaYBMkc3LwxDRWJVN8AmKabqy6UDlAwHf7IUJJMcu3ODG+2tsy3U1SGIVWIffpOfv3F/gXxU76IXWnHiUzjCMYnWJg0Oy0G2oCDHk/7h82Dmq688UmPW+ycZhktjZ8lXqlopVZhjssTa48xJjtDdwN8OVmPGpV/uVzlDTCuYbyVWTYrEfnKwwVhzmAoIYc4XxDKZQ/z1zqE3HtIGrems7lGpgry55JMIRSYxoD2gg2YscDvuCnfzITwTPjijuyI7ocP6eA13FHriIcfHYEzKENUoEgWeybgs09JyIp3yE7YelL94vY4xJRVeL1jMmP5Wi6pM9cMKgQwkUzq7tmupkh9c6jF+tPStByDvD11ybJi5A/S2Rmer2qhlgnsml4NHkMZgIcWtokxoGmXoMcz6AOx31nRvvBHjC2emVnUmzojTCc5mPY3TRgzlAb+cQE/JIreZMfhfLwk4ny5dq+r4ya02fo7BrDA8oJJAP0gC82KNW5aZVpZSbkeRdogTVWdmiNYxvq95gI4ijLneYwSgWb1PM+CRhlNY7neJEv0VT5fbMd0XQZnxzSzQVymPiBHMEJBUul6UuxjVlJb7cdCtIty0zEWO3/uaEzqQl3w==" + ], + "x5t": "Fk9zR2uLwBS6fHJbxM08TjDhUi8", + "x5t#S256": "ZiBGLQCaqehbgYF5A2dicp7WaL-zE4UTbFYyHKXDU_o" + }, + { + "kid": "Jnf5fTyMpeiUyJnc3PHJaM9pR6VjWejv9RVyJgPugFs", + "kty": "RSA", + "alg": "RS512", + "use": "sig", + "n": 
"m3Y_aeHLL00X-bBPF3ySQ5ebOQ0dz40IQ4uWwWzL59zxn1AwzqrfrfAkKt_RJvJycfmy4zFeu89bNI86r6PtQVSvLqRYKo9UI4Y5jXs4HyvGvSL-DOXl8b8ybpo-o3bEiTgGOvIw2NGv49xT-_3SJ4Rba6awqVxkj334eZunrfvwYG9bjbAgPqWgMcuLVQNdNpytRHMB8Cjnd0SouL1dVxHlgHpYsZcRbsTsvPO1fRHcQRel44CgQRCZ08BvgETrF_9eATiRKBz18XbhaCZfSqh3a7IA-w9e236w6oD4ATOigeMHYZ0sfqKeoCsSd4rQ9kVc-U_EtL73_BVV7pmM4Xcl8JB8vzi_FMQVotzj5SgawylIxRdWUOGjyVFcUJ_u-DikoneVway0T4fXFJkWUflIoqf5-lHmMupb32q0E_pNL728yOlBfqm3bfJF9SF9w-h2SFMHWdRUzVOrtDRdrJVReGPPWvUHByALLL6B33FEcHDIcw4wqSfEmD6ypYJQxX8Er3_X9QFCgkn_rYUitUx90jOZ0n5vhubYnhiXX3RpeOCh9gF2O3h9Tv-DrynUO6OOgUSsBBbI-tGC5ebT51P0IJRkK3i4TkIYZnv7lj2auGWMC0-o7w24k_fG4U0EAr9N2cenR3Pepl6pjTa2g3y3C5_0LDUrcd67QPKl6ZE", + "e": "AQAB", + "x5c": [ + "MIIElTCCAn0CBgF95wHdoDANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNDU4WhcNMzExMjIzMTExNjM4WjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCbdj9p4csvTRf5sE8XfJJDl5s5DR3PjQhDi5bBbMvn3PGfUDDOqt+t8CQq39Em8nJx+bLjMV67z1s0jzqvo+1BVK8upFgqj1QjhjmNezgfK8a9Iv4M5eXxvzJumj6jdsSJOAY68jDY0a/j3FP7/dInhFtrprCpXGSPffh5m6et+/Bgb1uNsCA+paAxy4tVA102nK1EcwHwKOd3RKi4vV1XEeWAelixlxFuxOy887V9EdxBF6XjgKBBEJnTwG+AROsX/14BOJEoHPXxduFoJl9KqHdrsgD7D17bfrDqgPgBM6KB4wdhnSx+op6gKxJ3itD2RVz5T8S0vvf8FVXumYzhdyXwkHy/OL8UxBWi3OPlKBrDKUjFF1ZQ4aPJUVxQn+74OKSid5XBrLRPh9cUmRZR+Uiip/n6UeYy6lvfarQT+k0vvbzI6UF+qbdt8kX1IX3D6HZIUwdZ1FTNU6u0NF2slVF4Y89a9QcHIAssvoHfcURwcMhzDjCpJ8SYPrKlglDFfwSvf9f1AUKCSf+thSK1TH3SM5nSfm+G5tieGJdfdGl44KH2AXY7eH1O/4OvKdQ7o46BRKwEFsj60YLl5tPnU/QglGQreLhOQhhme/uWPZq4ZYwLT6jvDbiT98bhTQQCv03Zx6dHc96mXqmNNraDfLcLn/QsNStx3rtA8qXpkQIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQAf2H6GjobSvc50L+cXeizzG6rg6Sm3x31PB7AH7XlVI+cytWA0X04IhuX+9H2VdEqujSApY/WM9voneyEm1eC3L6p4StO7icB+H4GYctzY+KV0qlbH3iMQkz+xngOTaEj+c9lSZlG7FSlL7Eybjjlj9mLyNJv4aiW7lQCxTWu7RcFq+w2ogvR7iv4uwbY9SHO/Fs5qbwzNIP65W9abcZvEAZKXQ69jOZ01VhNqiIA2D0OstjLWTfGaO0WxrUxvBVRqB3a86qIIwHjatrqdoGasLLGz8bAU3rY2b/DwZ7VBljUuZ+7PlysSK3w22k6eQe5G+XgxSl4Mzn+6lzCdoXeSVUzvQZrk+JBaDTVN5V5fteHSjLcaGNwIg9qYOHdx7PBYhbHP/hXADSQH
90xIMipG168NOGBaxw+ybCaD6Eg+PfsPGnXO0Wnnd0PN/Dz4LggTLBwlbWaIDltj++0Xxlf375MrK1A9mDkhcdAOzZtkBkTD9UeXqL6UD0R0CFHp0B+TQEZuOuKRMKmlA2eo8f8z70vGToYk5TW/lvi8Li44+Y7UGLlLirpOtfBI35TPLK0OGfLh1dfqnuFQACObk+Ia+ON//r203sSQYQf3Qcq7u5KC/S406W+dSJ+c7Cf+8piMVc42PhYemdrkEPgzuTmzTJga2HFQk8BCUwoL1euMdw==" + ], + "x5t": "bPku6_PBAoke1DpEcT0ghZYp6Fc", + "x5t#S256": "kIo7Hxj-A4jrwOBfo87c2kmAZzs87OHSd8tS4s_PGgk" + }, + { + "kid": "WerdZfF_9ZgxLyHepk92CsKAEubvCs3rIAAy6wrUZUc", + "kty": "RSA", + "alg": "PS256", + "use": "sig", + "n": "85fgcXq_tB48BI8oeF9gjeWqL1opGtHoXv4rmwaxwfwzFU2ywJWRIEjwcJ_ypMPdC1im_kz_VCqWZBFyXfpuaEFkcsIAlLLnklI2TPUD3SV5taV_TXA61fm59K59iJDJr9EaQ_j5WJRGRluJpAi_q55U1vBWAHtnweL9RveQ-Ykc_qhpCcGDIek3-tAvJtVCpKQb764tkvmBD3pUPYTdVKHW4TAp4wFcgcj78E-xWELfm0T1nr7kZu-mV9DGYBZhFIWkf0lm4KA6NVDwWe-d1k-20FpT1tNsugK2Zx7SX2N5ytM2bCLH88Fcphvh9Bw_t7GgtZ9PvihJXdJcHR8nqlCsRMsGpeS6tnEl4E8StcTccgOkw1n2FJ-xxLM9eMOcfY--B9eKSaLRjLrhvWfa5-MGpB5JFrB4Rv17SD02Uoz1lwogCXPzTbKkBJhiA-YDinTRyGzyHTNXWsrmOLXrVRXUqdNYG32mpy1m3cSpoz9fOWne2dKKj9eawxFHa-GCzdfX3JBfgVKGGgaL5E_HlkJxx9OHNfQQQ4_OjyzqQGCoPG7jDCn9svb7hOE2epmYywShCgCsL_DZmTm3OdVWMLZ6oi77SIytWSx8QDy5KNCx3YsSLDg7sWv6t58gerWv1gkjhFzhyi3mqsw53WkeUyInrLoDYzEPkjWv3kSKQeM", + "e": "AQAB", + "x5c": [ + 
"MIIElTCCAn0CBgF95wIdDjANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNTE1WhcNMzExMjIzMTExNjU1WjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDzl+Bxer+0HjwEjyh4X2CN5aovWika0ehe/iubBrHB/DMVTbLAlZEgSPBwn/Kkw90LWKb+TP9UKpZkEXJd+m5oQWRywgCUsueSUjZM9QPdJXm1pX9NcDrV+bn0rn2IkMmv0RpD+PlYlEZGW4mkCL+rnlTW8FYAe2fB4v1G95D5iRz+qGkJwYMh6Tf60C8m1UKkpBvvri2S+YEPelQ9hN1UodbhMCnjAVyByPvwT7FYQt+bRPWevuRm76ZX0MZgFmEUhaR/SWbgoDo1UPBZ753WT7bQWlPW02y6ArZnHtJfY3nK0zZsIsfzwVymG+H0HD+3saC1n0++KEld0lwdHyeqUKxEywal5Lq2cSXgTxK1xNxyA6TDWfYUn7HEsz14w5x9j74H14pJotGMuuG9Z9rn4wakHkkWsHhG/XtIPTZSjPWXCiAJc/NNsqQEmGID5gOKdNHIbPIdM1dayuY4tetVFdSp01gbfaanLWbdxKmjP185ad7Z0oqP15rDEUdr4YLN19fckF+BUoYaBovkT8eWQnHH04c19BBDj86PLOpAYKg8buMMKf2y9vuE4TZ6mZjLBKEKAKwv8NmZObc51VYwtnqiLvtIjK1ZLHxAPLko0LHdixIsODuxa/q3nyB6ta/WCSOEXOHKLeaqzDndaR5TIiesugNjMQ+SNa/eRIpB4wIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQDtW7hL3dWY0Nu87SkAPweBLocyI/S2/XZogBByzqdEWZru+26xQoUacqgYbrmQ6frQfWwlfpuzp7HBheDAHVobjlhl2jUQ7xO5vzTiB1bd/X1cQgOdTHosqiyTXLRBJKr3GQyfjrS3ruWKScGg5Y4jYGbsAoO3cNProddFeLbak0aQXGkhyWib2CzqtIpBA9Zy7EJYIWd5O+tExNIv+mjhSZZ6s3qdWXo/4RkVzBeGx5PApdoI/B7y0vwg4Dlt8qB9JcV9WL4nzI4s8foPMXuXgg+HJllB+NkSnTlQj77oU3pbrBoYgVhEdbfYkQuIdwYOWBQi/hdmV0YjUQQTAjYKBFKWQWCoAVKnfMpbDkdjN8KhOzohZ7KEahvHsnFt/PnS5MlFseZN9e6k4MB96EQ4fem7n/sPx4zqvhZMrPCaUT616hfCTa3DPoHzi2CxebE7GE95veQOtk9jCsXEbqKPvZ83/dfz5ftWu5wGHnhIK9S5sCCgjo2RA8bCLBl6/tBpmE0BwWqQqSZEs4zyXTplko822aJyxJtYprmDK0Ktxm6IEjSpEDCLuirnpQ0+Z8w19Key58Kx+OhNHczJK9wEaygKBQC1vvPV8ZvcHOx4XJgL8QwbPhaR5706YRfXTBceK2aw+oWzoNLJ4X7B2LB9IA84pJZKW+VfmnBz52iiqw==" + ], + "x5t": "Xdy9viGu6isFknWeThJbh2_r4Qo", + "x5t#S256": "-toFY0ysJ3uopRPDNIQBo2VV_XT5YkniW2I-6XK_2oc" + }, + { + "kid": "JjGFU4NwBkjaNRmIEw5BpoggXtG8dsl4s7gs29eYvno", + "kty": "RSA", + "alg": "PS384", + "use": "sig", + "n": 
"h3RNtfVqZPTQuFYBN54gOgcLX7bK-3qUyXstFso_V09RCHLHbFZV_czEC30lRQ6U5QeZ7iFpu7GbiM1csBk4HqhQ2v0TnjlQxIv9-71VV1JPZHrKsDFZlSr4HlZhkt6myBH16aDBT56U8pKg4oAVkoYS4dpzsR0q30zzrKAMgHRLTYWbCaGGGa1BuEUF9WgUhVnuiMu4ay9Tv0auu1UsdTkXjdR2YcWv2AihvFb4xYUSMBQr0bvUeMF_AAJ0B0VrGWIb51nARO2PNimKviHnFTrlaOyFJsnzwiiijuaOx2HZMQfcObzTz4Hx_YYIexOS83bYYkyGgvgUdu0wqls7ChgaZ_qiQdNnr_RWahIN2iVhjyOJuqsFsXufvHYo0nB1BFm1gnDHgYXdJIrSPql4g9gh1NZD_P0PuniPq3jvPoiQJ2u_9a8RDe9Scb_KzRgrBk0tkaXELDw1Q7ccJx9HUUbTxNkzNtZ6Z4MiKT4n0Bx4joglnL1BXvM5yrlO89brXAmfZgx6OmH7Dractz_Bny6QUHwF5vLMhMuVXsC5dU6UbkSZq82S5SnwnLHAe4JBOC-FTB08wAKgQXat16MIqmrBuKVtdNSshAxMk0wd_jPe-G4A_2RJ6pXSrQOkUFNPrOfV_PQMqI92zCYbIByWEwdfQAkavR2HC0-iDd202NM", + "e": "AQAB", + "x5c": [ + "MIIElTCCAn0CBgF95wJajzANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNTMwWhcNMzExMjIzMTExNzEwWjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCHdE219Wpk9NC4VgE3niA6Bwtftsr7epTJey0Wyj9XT1EIcsdsVlX9zMQLfSVFDpTlB5nuIWm7sZuIzVywGTgeqFDa/ROeOVDEi/37vVVXUk9kesqwMVmVKvgeVmGS3qbIEfXpoMFPnpTykqDigBWShhLh2nOxHSrfTPOsoAyAdEtNhZsJoYYZrUG4RQX1aBSFWe6Iy7hrL1O/Rq67VSx1OReN1HZhxa/YCKG8VvjFhRIwFCvRu9R4wX8AAnQHRWsZYhvnWcBE7Y82KYq+IecVOuVo7IUmyfPCKKKO5o7HYdkxB9w5vNPPgfH9hgh7E5LzdthiTIaC+BR27TCqWzsKGBpn+qJB02ev9FZqEg3aJWGPI4m6qwWxe5+8dijScHUEWbWCcMeBhd0kitI+qXiD2CHU1kP8/Q+6eI+reO8+iJAna7/1rxEN71Jxv8rNGCsGTS2RpcQsPDVDtxwnH0dRRtPE2TM21npngyIpPifQHHiOiCWcvUFe8znKuU7z1utcCZ9mDHo6YfsOtpy3P8GfLpBQfAXm8syEy5VewLl1TpRuRJmrzZLlKfCcscB7gkE4L4VMHTzAAqBBdq3XowiqasG4pW101KyEDEyTTB3+M974bgD/ZEnqldKtA6RQU0+s59X89Ayoj3bMJhsgHJYTB19ACRq9HYcLT6IN3bTY0wIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQAoudkN4cTAnT2b7cd/JklLFLBnw+mwSgj0ZYyRByBiC0AXU+LmM+D1Bs0TRqXKICBZ2dxKRr8Z1PdQe8BghWcl84iLXEjHVdw08/xVaQ5GKcGLOfSRG+3Suj6UyZfwcMJtX4GO919fX10mAlk6ySHe6SViSVMup5ePwA0C7Jws9/aXNLIvw82hIX8IVM1kuuu3DICQlr1nsvbu6XVQT5kdhIpApr+IDrBvNFWKPdH+vA8Kxb8wkhk9HIUbAi3WqftHoiI8Qq92BYcB5gjwocAkzmrDDoAulEM24+IJoK87DWEeC0Vu9kOB7i5PKXqUANJ7ebQJJhgXy+xNq1Alh4f95mqolXCxdo1jJi/OFExLDr93Fk5QVRQxi2aSEDkoz/h7stzuUvvTyT75
pJAILSL+xv8Gd/bYhL5lfCXcHA9uPDQwM/9gnA1ojIdF1bvgaEo2r1xoY/LAScTB0nzvRh1EVoZYxBHid+79MJWQq0vpJ58pyKcxgKaoD1pUQ2brAlYFNflNiMN18VnCF7vnY8Ol9Po881ee2TWLex0i5cLREo4fvPNg0QgoaQvDqlvJqr1nJll/Mzv2w9s3agQxPwKRkTOTb4jNOV23Uy0SbxBD42EOllLmUN897ra3pdmacHHMatw75Sfcu4WhuMrN13RzVUARMjFN+nNI8i7ay9WJOA==" + ], + "x5t": "4ovci1k_HPeLoL2PhUrmoDlLQhU", + "x5t#S256": "PJsKbXoQ7tZoR7aRDli60V65BPtO-Q7QSpk5P5hDcLY" + }, + { + "kid": "zesnP0SwjgVGBU5RPhqccF0W4BbMkbwtZpjAeTAgwz8", + "kty": "RSA", + "alg": "PS512", + "use": "sig", + "n": "x4NHNpmzOgqWgQGsiWTpyhdIkSSiO0hMKr_5oNNecp254CSO_zEPS6wWKMNwwZRteKIPzPafCkXvmGEuQo716CL9OP5T8BR25sXkws0llygfbbSK2dTWVN4lhM1Rm6zFJ4aK0BZo6EXDp0E0Od8SQSN7FooRAWOiO7HvjgpIdRyqkANElBSL7aNdsPP7dgVMua5P6MNfVjKCe93C-iqsOVadUV5UM3oblf6M_KkDV9GNr6oAizfrXHpPnHjG29u-DSsmCbLimgZaJ3LDnLrmzxbbl9b4mHJQqe00rNDUF6Q6BmmDgJGDMdPH4J8i6w_1z4Xll8Ul-UGHS6rJZeTVsEdKGSOoIbhQa9iuGxC_I_YIjkVbV3O8LcYBzDKetzups4R5CVFpwvAK03UCdM7yLkbDglWcSOYtbPVBafumCzyjWX9u7CpBAcVWe9KpEMVCYgi90TSkX2Vw1bPP07mTBFmK0fwmU2ZlDR0S9Q2NT9St7zWP6teuOeue7PAFlPUVotFdoh8ltZVLEfUTo81E1tiNycDCy9QTP9CzwplqpPIkmTdjmMCO6lollLrTm9SuXGp2FSUdE43tYEzRGNqsGpcwskkvzQWtl7bETaS5vCwPH76k6qGf-TpOHnOH1G7vDzDkewqJ-oscqwkdw4ONo_KxT-CGwv-JwMoSXWEtMKE", + "e": "AQAB", + "x5c": [ + 
"MIIElTCCAn0CBgF95wKLoDANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDANkZXYwHhcNMjExMjIzMTExNTQzWhcNMzExMjIzMTExNzIzWjAOMQwwCgYDVQQDDANkZXYwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDHg0c2mbM6CpaBAayJZOnKF0iRJKI7SEwqv/mg015ynbngJI7/MQ9LrBYow3DBlG14og/M9p8KRe+YYS5CjvXoIv04/lPwFHbmxeTCzSWXKB9ttIrZ1NZU3iWEzVGbrMUnhorQFmjoRcOnQTQ53xJBI3sWihEBY6I7se+OCkh1HKqQA0SUFIvto12w8/t2BUy5rk/ow19WMoJ73cL6Kqw5Vp1RXlQzehuV/oz8qQNX0Y2vqgCLN+tcek+ceMbb274NKyYJsuKaBloncsOcuubPFtuX1viYclCp7TSs0NQXpDoGaYOAkYMx08fgnyLrD/XPheWXxSX5QYdLqsll5NWwR0oZI6ghuFBr2K4bEL8j9giORVtXc7wtxgHMMp63O6mzhHkJUWnC8ArTdQJ0zvIuRsOCVZxI5i1s9UFp+6YLPKNZf27sKkEBxVZ70qkQxUJiCL3RNKRfZXDVs8/TuZMEWYrR/CZTZmUNHRL1DY1P1K3vNY/q1645657s8AWU9RWi0V2iHyW1lUsR9ROjzUTW2I3JwMLL1BM/0LPCmWqk8iSZN2OYwI7qWiWUutOb1K5canYVJR0Tje1gTNEY2qwalzCySS/NBa2XtsRNpLm8LA8fvqTqoZ/5Ok4ec4fUbu8PMOR7Con6ixyrCR3Dg42j8rFP4IbC/4nAyhJdYS0woQIDAQABMA0GCSqGSIb3DQEBCwUAA4ICAQCr+GGCVS/sBHukLZay8WlBXtowJ6qyX8hMFClDGDN9/c3mUbLJsCCVN6Jbr33BgNZ/ZuvLhUvhWGPlOXUB3Rf+qRzNEzoLVwanw2yCUEKFi6AvuBUY9twNnifH4y1Cg34NVaZoPvQ0hlOLGYl9CCxen7VMLJ5QbTC8H3fPX1prWOic5x46Bu7IqoEqZtDszt8F+uteruRsHVHCiWx5dW7goeIa8YsUK0A4mnOy5kViSvs5L6Kq0N5uCB9EDu/Ew5R0/mi/UTm5L8CpzQig1pmvDtIy7ZnosHu7zYGSQiR04jn3Od0rdWzTCcs8W79+ewgJ0bdYmfvSnVehs1BR+cjivzBqMWMqdyz6eQXCy/esiG5KDIxH4F0HGLiiwXqHUYjJPex8TId+fz0MFScrEN5fjE+XltGzsPwlcgnAqE0pN0ExJSHwzBHNkJJQpjHrsEurWn9QGBqD75Vt9yVeHE8MZ4zMGj3ZkRmn1x6wVBdv1V12P3e4b8V5aG02FbREkJzFTXtGyDHtw/hlWGz9M9w0c5TAI6xYPa1gS6/Fw95J6S0V3n3JH+xqi6yv2H2cQHukFxFSPJW1cc/hh5DJ4Ag8+pKuO1Vdo9p+DltaGLWBabON7GZZojlYdx2WtBZK9CMRgrxobg+OBA44AHkiWkhflrqGLYul866wiNu6zLEfdQ==" + ], + "x5t": "0lMdqEAhOWfUXDivtS-KwPvwKNY", + "x5t#S256": "aOjQ1awJmcaF7Yiz75ifjBKbjr4Eo-Ha5uNMi-TtuGw" + }, + { + "kid": "VlsIs1LssBo6r8EuXJo81rDEoTYpUjiMkeq_PlapKfY", + "kty": "EC", + "alg": "ES256", + "use": "sig", + "crv": "P-256", + "x": "3kqy7us0mepJJblWwj0Exg2S7PtWaJvB7SI_ptg0jrA", + "y": "S5Z8d4AfCvRL-hUd6Pv-L3tH6H9T4RIwO2tvBS0hj1A" + }, + { + "kid": "1yWLiqf8sa-em0hSbtZEjKmrardmQdYLR9gpzsypMCU", + "kty": "EC", 
+ "alg": "ES384", + "use": "sig", + "crv": "P-384", + "x": "i4YYGQZd5QQ1JpUXcrZe5wpCid3pqFLnzxxy89Chn-NQ1oYDPTP2M8V9sfazeuB0", + "y": "xf4qN2ZuMLVh4GmRVt1PHhQooB2o61pF0lHrBlIod5hVamiRtUo_Np9PikPD8Uap" + }, + { + "kid": "V5EwcLp9vmwAnstzI1Ndba-iWkX5oTBHK7GnYTyfuOE", + "kty": "EC", + "alg": "ES512", + "use": "sig", + "crv": "P-521", + "x": "rScgdd_n2cHLyzZvP8zw0u9vQyhu0VsbfQypheS7aDoHRLcXccPQTsmrQLrLuKX8PPkITjL_BJDSm7Bo8gv5Sd4", + "y": "Vu3rTFNn_9zWTki95UGT1Bd9PN84KDXmttCrJ1bsYHTWQCaEONk8iwA3U6mEDrg4xtZSTXXKCFdFP13ONWB9oZ4" + } + ] + }); + + let jwks: PublicJsonWebKeySet = serde_json::from_value(jwks).unwrap(); + // The first 6 keys are RSA, 7th is P-256 + let mut keys = jwks.keys.into_iter(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + rsa::RsaPublicKey::try_from(keys.next().unwrap().params().rsa().unwrap()).unwrap(); + // 7th is P-256 + elliptic_curve::PublicKey::::try_from( + keys.next().unwrap().params().ec().unwrap(), + ) + .unwrap(); + // 8th is P-384 + elliptic_curve::PublicKey::::try_from( + keys.next().unwrap().params().ec().unwrap(), + ) + .unwrap(); + // 8th is P-521, but we don't support it yet + keys.next().unwrap().params().ec().unwrap(); + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwk/private_parameters.rs b/matrix-authentication-service/crates/jose/src/jwk/private_parameters.rs new file mode 100644 index 00000000..6518e5ba --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwk/private_parameters.rs @@ -0,0 +1,359 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_iana::jose::{ + JsonWebKeyEcEllipticCurve, JsonWebKeyOkpEllipticCurve, JsonWebKeyType, JsonWebSignatureAlg, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use super::{ParametersInfo, public_parameters::JsonWebKeyPublicParameters}; +use crate::base64::Base64UrlNoPad; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(tag = "kty")] +pub enum JsonWebKeyPrivateParameters { + #[serde(rename = "oct")] + Oct(OctPrivateParameters), + + #[serde(rename = "RSA")] + Rsa(RsaPrivateParameters), + + #[serde(rename = "EC")] + Ec(EcPrivateParameters), + + #[serde(rename = "OKP")] + Okp(OkpPrivateParameters), +} + +impl JsonWebKeyPrivateParameters { + #[must_use] + pub const fn oct(&self) -> Option<&OctPrivateParameters> { + match self { + Self::Oct(params) => Some(params), + _ => None, + } + } + + #[must_use] + pub const fn rsa(&self) -> Option<&RsaPrivateParameters> { + match self { + Self::Rsa(params) => Some(params), + _ => None, + } + } + + #[must_use] + pub const fn ec(&self) -> Option<&EcPrivateParameters> { + match self { + Self::Ec(params) => Some(params), + _ => None, + } + } + + #[must_use] + pub const fn okp(&self) -> Option<&OkpPrivateParameters> { + match self { + Self::Okp(params) => Some(params), + _ => None, + } + } +} + +impl ParametersInfo for JsonWebKeyPrivateParameters { + fn kty(&self) -> JsonWebKeyType { + match self { + Self::Oct(_) => JsonWebKeyType::Oct, + Self::Rsa(_) => JsonWebKeyType::Rsa, + Self::Ec(_) => JsonWebKeyType::Ec, + Self::Okp(_) => JsonWebKeyType::Okp, + } + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + match self { + JsonWebKeyPrivateParameters::Oct(p) => p.possible_algs(), + JsonWebKeyPrivateParameters::Rsa(p) => p.possible_algs(), + JsonWebKeyPrivateParameters::Ec(p) => 
p.possible_algs(), + JsonWebKeyPrivateParameters::Okp(p) => p.possible_algs(), + } + } +} + +#[derive(Debug, Error)] +#[error("can't extract a public key out of a symetric key")] +pub struct SymetricKeyError; + +impl TryFrom for JsonWebKeyPublicParameters { + type Error = SymetricKeyError; + + fn try_from(value: JsonWebKeyPrivateParameters) -> Result { + match value { + JsonWebKeyPrivateParameters::Oct(_) => Err(SymetricKeyError), + JsonWebKeyPrivateParameters::Rsa(p) => Ok(JsonWebKeyPublicParameters::Rsa(p.into())), + JsonWebKeyPrivateParameters::Ec(p) => Ok(JsonWebKeyPublicParameters::Ec(p.into())), + JsonWebKeyPrivateParameters::Okp(p) => Ok(JsonWebKeyPublicParameters::Okp(p.into())), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct OctPrivateParameters { + /// Key Value + #[schemars(with = "String")] + k: Base64UrlNoPad, +} + +impl ParametersInfo for OctPrivateParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Oct + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + &[ + JsonWebSignatureAlg::Hs256, + JsonWebSignatureAlg::Hs384, + JsonWebSignatureAlg::Hs512, + ] + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct RsaPrivateParameters { + /// Modulus + #[schemars(with = "String")] + n: Base64UrlNoPad, + + /// Exponent + #[schemars(with = "String")] + e: Base64UrlNoPad, + + /// Private Exponent + #[schemars(with = "String")] + d: Base64UrlNoPad, + + /// First Prime Factor + #[schemars(with = "String")] + p: Base64UrlNoPad, + + /// Second Prime Factor + #[schemars(with = "String")] + q: Base64UrlNoPad, + + /// First Factor CRT Exponent + #[schemars(with = "String")] + dp: Base64UrlNoPad, + + /// Second Factor CRT Exponent + #[schemars(with = "String")] + dq: Base64UrlNoPad, + + /// First CRT Coefficient + #[schemars(with = "String")] + qi: Base64UrlNoPad, + + /// Other Primes Info + #[serde(skip_serializing_if = "Option::is_none")] + oth: 
Option>, +} + +impl ParametersInfo for RsaPrivateParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Rsa + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + &[ + JsonWebSignatureAlg::Rs256, + JsonWebSignatureAlg::Rs384, + JsonWebSignatureAlg::Rs512, + JsonWebSignatureAlg::Ps256, + JsonWebSignatureAlg::Ps384, + JsonWebSignatureAlg::Ps512, + ] + } +} + +impl From for super::public_parameters::RsaPublicParameters { + fn from(params: RsaPrivateParameters) -> Self { + Self::new(params.n, params.e) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +struct RsaOtherPrimeInfo { + /// Prime Factor + #[schemars(with = "String")] + r: Base64UrlNoPad, + + /// Factor CRT Exponent + #[schemars(with = "String")] + d: Base64UrlNoPad, + + /// Factor CRT Coefficient + #[schemars(with = "String")] + t: Base64UrlNoPad, +} + +mod rsa_impls { + use rsa::{BigUint, RsaPrivateKey}; + + use super::RsaPrivateParameters; + + impl TryFrom for RsaPrivateKey { + type Error = rsa::errors::Error; + fn try_from(value: RsaPrivateParameters) -> Result { + Self::try_from(&value) + } + } + + impl TryFrom<&RsaPrivateParameters> for RsaPrivateKey { + type Error = rsa::errors::Error; + + #[allow(clippy::many_single_char_names)] + fn try_from(value: &RsaPrivateParameters) -> Result { + let n = BigUint::from_bytes_be(value.n.as_bytes()); + let e = BigUint::from_bytes_be(value.e.as_bytes()); + let d = BigUint::from_bytes_be(value.d.as_bytes()); + + let primes = [&value.p, &value.q] + .into_iter() + .chain(value.oth.iter().flatten().map(|o| &o.r)) + .map(|i| BigUint::from_bytes_be(i.as_bytes())) + .collect(); + + let key = RsaPrivateKey::from_components(n, e, d, primes)?; + + key.validate()?; + + Ok(key) + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct EcPrivateParameters { + pub(crate) crv: JsonWebKeyEcEllipticCurve, + + #[schemars(with = "String")] + x: Base64UrlNoPad, + + #[schemars(with = 
"String")] + y: Base64UrlNoPad, + + #[schemars(with = "String")] + d: Base64UrlNoPad, +} + +impl ParametersInfo for EcPrivateParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Ec + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + match self.crv { + JsonWebKeyEcEllipticCurve::P256 => &[JsonWebSignatureAlg::Es256], + JsonWebKeyEcEllipticCurve::P384 => &[JsonWebSignatureAlg::Es384], + JsonWebKeyEcEllipticCurve::P521 => &[JsonWebSignatureAlg::Es512], + JsonWebKeyEcEllipticCurve::Secp256K1 => &[JsonWebSignatureAlg::Es256K], + _ => &[], + } + } +} + +impl From for super::public_parameters::EcPublicParameters { + fn from(params: EcPrivateParameters) -> Self { + Self::new(params.crv, params.x, params.y) + } +} + +mod ec_impls { + use elliptic_curve::{ + AffinePoint, Curve, SecretKey, + sec1::{Coordinates, FromEncodedPoint, ModulusSize, ToEncodedPoint}, + }; + + use super::{super::JwkEcCurve, EcPrivateParameters}; + use crate::base64::Base64UrlNoPad; + + impl TryFrom for SecretKey + where + C: Curve, + { + type Error = elliptic_curve::Error; + fn try_from(value: EcPrivateParameters) -> Result { + Self::try_from(&value) + } + } + + impl TryFrom<&EcPrivateParameters> for SecretKey + where + C: Curve, + { + type Error = elliptic_curve::Error; + + fn try_from(value: &EcPrivateParameters) -> Result { + SecretKey::from_slice(value.d.as_bytes()) + } + } + + impl From> for EcPrivateParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: SecretKey) -> Self { + (&key).into() + } + } + + impl From<&SecretKey> for EcPrivateParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: &SecretKey) -> Self { + let point = key.public_key().to_encoded_point(false); + let Coordinates::Uncompressed { x, y } = point.coordinates() else { + 
unreachable!() + }; + let d = key.to_bytes(); + EcPrivateParameters { + crv: C::CRV, + x: Base64UrlNoPad::new(x.to_vec()), + y: Base64UrlNoPad::new(y.to_vec()), + d: Base64UrlNoPad::new(d.to_vec()), + } + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct OkpPrivateParameters { + crv: JsonWebKeyOkpEllipticCurve, + + #[schemars(with = "String")] + x: Base64UrlNoPad, +} + +impl ParametersInfo for OkpPrivateParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Okp + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + &[JsonWebSignatureAlg::EdDsa] + } +} + +impl From for super::public_parameters::OkpPublicParameters { + fn from(params: OkpPrivateParameters) -> Self { + Self::new(params.crv, params.x) + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwk/public_parameters.rs b/matrix-authentication-service/crates/jose/src/jwk/public_parameters.rs new file mode 100644 index 00000000..f4b57c53 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwk/public_parameters.rs @@ -0,0 +1,346 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use mas_iana::jose::{ + JsonWebKeyEcEllipticCurve, JsonWebKeyOkpEllipticCurve, JsonWebKeyType, JsonWebSignatureAlg, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +use super::ParametersInfo; +use crate::{base64::Base64UrlNoPad, jwk::Thumbprint}; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(tag = "kty")] +pub enum JsonWebKeyPublicParameters { + #[serde(rename = "RSA")] + Rsa(RsaPublicParameters), + + #[serde(rename = "EC")] + Ec(EcPublicParameters), + + #[serde(rename = "OKP")] + Okp(OkpPublicParameters), +} + +impl JsonWebKeyPublicParameters { + #[must_use] + pub const fn rsa(&self) -> Option<&RsaPublicParameters> { + match self { + Self::Rsa(params) => Some(params), + _ => None, + } + } + + #[must_use] + pub const fn ec(&self) -> Option<&EcPublicParameters> { + match self { + Self::Ec(params) => Some(params), + _ => None, + } + } + + #[must_use] + pub const fn okp(&self) -> Option<&OkpPublicParameters> { + match self { + Self::Okp(params) => Some(params), + _ => None, + } + } +} + +impl Thumbprint for JsonWebKeyPublicParameters { + fn thumbprint_prehashed(&self) -> String { + match self { + JsonWebKeyPublicParameters::Rsa(RsaPublicParameters { n, e }) => { + format!("{{\"e\":\"{e}\",\"kty\":\"RSA\",\"n\":\"{n}\"}}") + } + JsonWebKeyPublicParameters::Ec(EcPublicParameters { crv, x, y }) => { + format!("{{\"crv\":\"{crv}\",\"kty\":\"EC\",\"x\":\"{x}\",\"y\":\"{y}\"}}") + } + JsonWebKeyPublicParameters::Okp(OkpPublicParameters { crv, x }) => { + format!("{{\"crv\":\"{crv}\",\"kty\":\"OKP\",\"x\":\"{x}\"}}") + } + } + } +} + +impl ParametersInfo for JsonWebKeyPublicParameters { + fn kty(&self) -> JsonWebKeyType { + match self { + Self::Rsa(_) => JsonWebKeyType::Rsa, + Self::Ec(_) => JsonWebKeyType::Ec, + Self::Okp(_) => JsonWebKeyType::Okp, + } + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + match self { + JsonWebKeyPublicParameters::Rsa(p) => p.possible_algs(), + 
JsonWebKeyPublicParameters::Ec(p) => p.possible_algs(), + JsonWebKeyPublicParameters::Okp(p) => p.possible_algs(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct RsaPublicParameters { + #[schemars(with = "String")] + n: Base64UrlNoPad, + + #[schemars(with = "String")] + e: Base64UrlNoPad, +} + +impl ParametersInfo for RsaPublicParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Rsa + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + &[ + JsonWebSignatureAlg::Rs256, + JsonWebSignatureAlg::Rs384, + JsonWebSignatureAlg::Rs512, + JsonWebSignatureAlg::Ps256, + JsonWebSignatureAlg::Ps384, + JsonWebSignatureAlg::Ps512, + ] + } +} + +impl RsaPublicParameters { + pub const fn new(n: Base64UrlNoPad, e: Base64UrlNoPad) -> Self { + Self { n, e } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct EcPublicParameters { + pub(crate) crv: JsonWebKeyEcEllipticCurve, + + #[schemars(with = "String")] + x: Base64UrlNoPad, + + #[schemars(with = "String")] + y: Base64UrlNoPad, +} + +impl EcPublicParameters { + pub const fn new(crv: JsonWebKeyEcEllipticCurve, x: Base64UrlNoPad, y: Base64UrlNoPad) -> Self { + Self { crv, x, y } + } +} + +impl ParametersInfo for EcPublicParameters { + fn kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Ec + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + match &self.crv { + JsonWebKeyEcEllipticCurve::P256 => &[JsonWebSignatureAlg::Es256], + JsonWebKeyEcEllipticCurve::P384 => &[JsonWebSignatureAlg::Es384], + JsonWebKeyEcEllipticCurve::P521 => &[JsonWebSignatureAlg::Es512], + JsonWebKeyEcEllipticCurve::Secp256K1 => &[JsonWebSignatureAlg::Es256K], + _ => &[], + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +pub struct OkpPublicParameters { + crv: JsonWebKeyOkpEllipticCurve, + + #[schemars(with = "String")] + x: Base64UrlNoPad, +} + +impl ParametersInfo for OkpPublicParameters { + fn 
kty(&self) -> JsonWebKeyType { + JsonWebKeyType::Okp + } + + fn possible_algs(&self) -> &[JsonWebSignatureAlg] { + &[JsonWebSignatureAlg::EdDsa] + } +} + +impl OkpPublicParameters { + pub const fn new(crv: JsonWebKeyOkpEllipticCurve, x: Base64UrlNoPad) -> Self { + Self { crv, x } + } +} + +mod rsa_impls { + use rsa::{BigUint, RsaPublicKey, traits::PublicKeyParts}; + + use super::{JsonWebKeyPublicParameters, RsaPublicParameters}; + use crate::base64::Base64UrlNoPad; + + impl From for JsonWebKeyPublicParameters { + fn from(key: RsaPublicKey) -> Self { + Self::from(&key) + } + } + + impl From<&RsaPublicKey> for JsonWebKeyPublicParameters { + fn from(key: &RsaPublicKey) -> Self { + Self::Rsa(key.into()) + } + } + + impl From for RsaPublicParameters { + fn from(key: RsaPublicKey) -> Self { + Self::from(&key) + } + } + + impl From<&RsaPublicKey> for RsaPublicParameters { + fn from(key: &RsaPublicKey) -> Self { + Self { + n: Base64UrlNoPad::new(key.n().to_bytes_be()), + e: Base64UrlNoPad::new(key.e().to_bytes_be()), + } + } + } + + impl TryFrom for RsaPublicKey { + type Error = rsa::errors::Error; + fn try_from(value: RsaPublicParameters) -> Result { + (&value).try_into() + } + } + + impl TryFrom<&RsaPublicParameters> for RsaPublicKey { + type Error = rsa::errors::Error; + fn try_from(value: &RsaPublicParameters) -> Result { + let n = BigUint::from_bytes_be(value.n.as_bytes()); + let e = BigUint::from_bytes_be(value.e.as_bytes()); + let key = RsaPublicKey::new(n, e)?; + Ok(key) + } + } +} + +mod ec_impls { + use digest::typenum::Unsigned; + use ecdsa::EncodedPoint; + use elliptic_curve::{ + AffinePoint, FieldBytes, PublicKey, + sec1::{Coordinates, FromEncodedPoint, ModulusSize, ToEncodedPoint}, + }; + + use super::{super::JwkEcCurve, EcPublicParameters, JsonWebKeyPublicParameters}; + use crate::base64::Base64UrlNoPad; + + impl TryFrom<&EcPublicParameters> for PublicKey + where + C: elliptic_curve::CurveArithmetic, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + 
C::FieldBytesSize: ModulusSize + Unsigned, + { + type Error = elliptic_curve::Error; + fn try_from(value: &EcPublicParameters) -> Result { + let x = value + .x + .as_bytes() + .get(..C::FieldBytesSize::USIZE) + .ok_or(elliptic_curve::Error)?; + let y = value + .y + .as_bytes() + .get(..C::FieldBytesSize::USIZE) + .ok_or(elliptic_curve::Error)?; + + let x = FieldBytes::::from_slice(x); + let y = FieldBytes::::from_slice(y); + let pubkey = EncodedPoint::::from_affine_coordinates(x, y, false); + let pubkey: Option<_> = PublicKey::from_encoded_point(&pubkey).into(); + pubkey.ok_or(elliptic_curve::Error) + } + } + + impl From> for JsonWebKeyPublicParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: PublicKey) -> Self { + (&key).into() + } + } + + impl From<&PublicKey> for JsonWebKeyPublicParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: &PublicKey) -> Self { + Self::Ec(key.into()) + } + } + + impl From> for EcPublicParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: PublicKey) -> Self { + (&key).into() + } + } + + impl From<&PublicKey> for EcPublicParameters + where + C: elliptic_curve::CurveArithmetic + JwkEcCurve, + AffinePoint: FromEncodedPoint + ToEncodedPoint, + C::FieldBytesSize: ModulusSize, + { + fn from(key: &PublicKey) -> Self { + let point = key.to_encoded_point(false); + let Coordinates::Uncompressed { x, y } = point.coordinates() else { + unreachable!() + }; + EcPublicParameters { + crv: C::CRV, + x: Base64UrlNoPad::new(x.to_vec()), + y: Base64UrlNoPad::new(y.to_vec()), + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_thumbprint_rfc_example() { + // From 
https://www.rfc-editor.org/rfc/rfc7638.html#section-3.1 + let n = Base64UrlNoPad::parse( + "\ + 0vx7agoebGcQSuuPiLJXZptN9nndrQmbXEps2aiAFbWhM78LhWx4cbbfAAt\ + VT86zwu1RK7aPFFxuhDR1L6tSoc_BJECPebWKRXjBZCiFV4n3oknjhMstn6\ + 4tZ_2W-5JsGY4Hc5n9yBXArwl93lqt7_RN5w6Cf0h4QyQ5v-65YGjQR0_FD\ + W2QvzqY368QQMicAtaSqzs8KJZgnYb9c7d0zgdAZHzu6qMQvRL5hajrn1n9\ + 1CbOpbISD08qNLyrdkt-bFTWhAI4vMQFh6WeZu0fM4lFd2NcRwr3XPksINH\ + aQ-G_xBniIqbw0Ls1jF44-csFCur-kEgU8awapJzKnqDKgw", + ) + .unwrap(); + let e = Base64UrlNoPad::parse("AQAB").unwrap(); + + let jwkpps = JsonWebKeyPublicParameters::Rsa(RsaPublicParameters { n, e }); + + assert_eq!( + jwkpps.thumbprint_sha256_base64(), + "NzbLsXh8uDCcd-6MNwXF4W_7noWXFZAfHkxZsRGC9Xs" + ); + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwt/header.rs b/matrix-authentication-service/crates/jose/src/jwt/header.rs new file mode 100644 index 00000000..c35e9fdb --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwt/header.rs @@ -0,0 +1,131 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use mas_iana::jose::JsonWebSignatureAlg; +use serde::{Deserialize, Serialize}; +use serde_with::skip_serializing_none; +use url::Url; + +use crate::{Base64, base64::Base64UrlNoPad, jwk::PublicJsonWebKey}; + +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct JsonWebSignatureHeader { + alg: JsonWebSignatureAlg, + + #[serde(default)] + jku: Option, + + #[serde(default)] + jwk: Option>, + + #[serde(default)] + kid: Option, + + #[serde(default)] + x5u: Option, + + #[serde(default)] + x5c: Option>, + + #[serde(default)] + x5t: Option, + + #[serde(default, rename = "x5t#S256")] + x5t_s256: Option, + + #[serde(default)] + typ: Option, + + #[serde(default)] + cty: Option, + + #[serde(default)] + crit: Option>, +} + +impl JsonWebSignatureHeader { + #[must_use] + pub fn new(alg: JsonWebSignatureAlg) -> Self { + Self { + alg, + jku: None, + jwk: None, + kid: None, + x5u: None, + x5c: None, + x5t: None, + x5t_s256: None, + typ: None, + cty: None, + crit: None, + } + } + + #[must_use] + pub const fn alg(&self) -> &JsonWebSignatureAlg { + &self.alg + } + + #[must_use] + pub const fn jku(&self) -> Option<&Url> { + self.jku.as_ref() + } + + #[must_use] + pub fn with_jku(mut self, jku: Url) -> Self { + self.jku = Some(jku); + self + } + + #[must_use] + pub const fn jwk(&self) -> Option<&PublicJsonWebKey> { + // Can't use as_deref because it's not a const fn + match &self.jwk { + Some(jwk) => Some(jwk), + None => None, + } + } + + #[must_use] + pub fn with_jwk(mut self, jwk: PublicJsonWebKey) -> Self { + self.jwk = Some(Box::new(jwk)); + self + } + + #[must_use] + pub fn kid(&self) -> Option<&str> { + self.kid.as_deref() + } + + #[must_use] + pub fn with_kid(mut self, kid: impl Into) -> Self { + self.kid = Some(kid.into()); + self + } + + #[must_use] + pub fn typ(&self) -> Option<&str> { + self.typ.as_deref() + } + + #[must_use] + pub fn with_typ(mut self, typ: String) -> Self { + self.typ = Some(typ); + self + } + + #[must_use] + 
pub fn crit(&self) -> Option<&[String]> { + self.crit.as_deref() + } + + #[must_use] + pub fn with_crit(mut self, crit: Vec) -> Self { + self.crit = Some(crit); + self + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwt/mod.rs b/matrix-authentication-service/crates/jose/src/jwt/mod.rs new file mode 100644 index 00000000..50a77719 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwt/mod.rs @@ -0,0 +1,14 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod header; +mod raw; +mod signed; + +pub use self::{ + header::JsonWebSignatureHeader, + signed::{Jwt, JwtDecodeError, JwtSignatureError, JwtVerificationError, NoKeyWorked}, +}; diff --git a/matrix-authentication-service/crates/jose/src/jwt/raw.rs b/matrix-authentication-service/crates/jose/src/jwt/raw.rs new file mode 100644 index 00000000..46193f99 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwt/raw.rs @@ -0,0 +1,128 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{borrow::Cow, ops::Deref}; + +use thiserror::Error; + +#[derive(Clone, PartialEq, Eq)] +pub struct RawJwt<'a> { + inner: Cow<'a, str>, + first_dot: usize, + second_dot: usize, +} + +impl RawJwt<'static> { + pub(super) fn new(inner: String, first_dot: usize, second_dot: usize) -> Self { + Self { + inner: inner.into(), + first_dot, + second_dot, + } + } +} + +impl std::fmt::Display for RawJwt<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.inner) + } +} + +impl<'a> RawJwt<'a> { + pub fn header(&'a self) -> &'a str { + &self.inner[..self.first_dot] + } + + pub fn payload(&'a self) -> &'a str { + &self.inner[self.first_dot + 1..self.second_dot] + } + + pub fn signature(&'a self) -> &'a str { + &self.inner[self.second_dot + 1..] + } + + pub fn signed_part(&'a self) -> &'a str { + &self.inner[..self.second_dot] + } + + pub fn into_owned(self) -> RawJwt<'static> { + RawJwt { + inner: self.inner.into_owned().into(), + first_dot: self.first_dot, + second_dot: self.second_dot, + } + } +} + +impl Deref for RawJwt<'_> { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +#[derive(Debug, Error)] +pub enum DecodeError { + #[error("no dots found in JWT")] + NoDots, + + #[error("only one dot found in JWT")] + OnlyOneDot, + + #[error("too many dots in JWT")] + TooManyDots, +} + +impl<'a> From> for String { + fn from(val: RawJwt<'a>) -> Self { + val.inner.into() + } +} + +impl<'a> TryFrom<&'a str> for RawJwt<'a> { + type Error = DecodeError; + fn try_from(value: &'a str) -> Result { + let mut indices = value + .char_indices() + .filter_map(|(idx, c)| (c == '.').then_some(idx)); + + let first_dot = indices.next().ok_or(DecodeError::NoDots)?; + let second_dot = indices.next().ok_or(DecodeError::OnlyOneDot)?; + + if indices.next().is_some() { + return Err(DecodeError::TooManyDots); + } + + Ok(Self { + inner: value.into(), + first_dot, + second_dot, + }) + } +} + +impl TryFrom for 
RawJwt<'static> { + type Error = DecodeError; + fn try_from(value: String) -> Result { + let mut indices = value + .char_indices() + .filter_map(|(idx, c)| (c == '.').then_some(idx)); + + let first_dot = indices.next().ok_or(DecodeError::NoDots)?; + let second_dot = indices.next().ok_or(DecodeError::OnlyOneDot)?; + + if indices.next().is_some() { + return Err(DecodeError::TooManyDots); + } + + Ok(Self { + inner: value.into(), + first_dot, + second_dot, + }) + } +} diff --git a/matrix-authentication-service/crates/jose/src/jwt/signed.rs b/matrix-authentication-service/crates/jose/src/jwt/signed.rs new file mode 100644 index 00000000..3d632c05 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/jwt/signed.rs @@ -0,0 +1,416 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use base64ct::{Base64UrlUnpadded, Encoding}; +use rand::thread_rng; +use serde::{Serialize, de::DeserializeOwned}; +use signature::{RandomizedSigner, SignatureEncoding, Verifier, rand_core::CryptoRngCore}; +use thiserror::Error; + +use super::{header::JsonWebSignatureHeader, raw::RawJwt}; +use crate::{constraints::ConstraintSet, jwk::PublicJsonWebKeySet}; + +#[derive(Clone, PartialEq, Eq)] +pub struct Jwt<'a, T> { + raw: RawJwt<'a>, + header: JsonWebSignatureHeader, + payload: T, + signature: Vec, +} + +impl std::fmt::Display for Jwt<'_, T> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.raw) + } +} + +impl std::fmt::Debug for Jwt<'_, T> +where + T: std::fmt::Debug, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Jwt") + .field("raw", &"...") + .field("header", &self.header) + .field("payload", &self.payload) + .field("signature", &"...") + .finish() + } +} + +#[derive(Debug, Error)] +pub enum 
JwtDecodeError { + #[error(transparent)] + RawDecode { + #[from] + inner: super::raw::DecodeError, + }, + + #[error("failed to decode JWT header")] + DecodeHeader { + #[source] + inner: base64ct::Error, + }, + + #[error("failed to deserialize JWT header")] + DeserializeHeader { + #[source] + inner: serde_json::Error, + }, + + #[error("failed to decode JWT payload")] + DecodePayload { + #[source] + inner: base64ct::Error, + }, + + #[error("failed to deserialize JWT payload")] + DeserializePayload { + #[source] + inner: serde_json::Error, + }, + + #[error("failed to decode JWT signature")] + DecodeSignature { + #[source] + inner: base64ct::Error, + }, +} + +impl JwtDecodeError { + fn decode_header(inner: base64ct::Error) -> Self { + Self::DecodeHeader { inner } + } + + fn deserialize_header(inner: serde_json::Error) -> Self { + Self::DeserializeHeader { inner } + } + + fn decode_payload(inner: base64ct::Error) -> Self { + Self::DecodePayload { inner } + } + + fn deserialize_payload(inner: serde_json::Error) -> Self { + Self::DeserializePayload { inner } + } + + fn decode_signature(inner: base64ct::Error) -> Self { + Self::DecodeSignature { inner } + } +} + +impl<'a, T> TryFrom> for Jwt<'a, T> +where + T: DeserializeOwned, +{ + type Error = JwtDecodeError; + fn try_from(raw: RawJwt<'a>) -> Result { + let header_reader = + base64ct::Decoder::<'_, Base64UrlUnpadded>::new(raw.header().as_bytes()) + .map_err(JwtDecodeError::decode_header)?; + let header = + serde_json::from_reader(header_reader).map_err(JwtDecodeError::deserialize_header)?; + + let payload_reader = + base64ct::Decoder::<'_, Base64UrlUnpadded>::new(raw.payload().as_bytes()) + .map_err(JwtDecodeError::decode_payload)?; + let payload = + serde_json::from_reader(payload_reader).map_err(JwtDecodeError::deserialize_payload)?; + + let signature = Base64UrlUnpadded::decode_vec(raw.signature()) + .map_err(JwtDecodeError::decode_signature)?; + + Ok(Self { + raw, + header, + payload, + signature, + }) + } +} + 
+impl<'a, T> TryFrom<&'a str> for Jwt<'a, T> +where + T: DeserializeOwned, +{ + type Error = JwtDecodeError; + fn try_from(value: &'a str) -> Result { + let raw = RawJwt::try_from(value)?; + Self::try_from(raw) + } +} + +impl TryFrom for Jwt<'static, T> +where + T: DeserializeOwned, +{ + type Error = JwtDecodeError; + fn try_from(value: String) -> Result { + let raw = RawJwt::try_from(value)?; + Self::try_from(raw) + } +} + +#[derive(Debug, Error)] +pub enum JwtVerificationError { + #[error("failed to parse signature")] + ParseSignature, + + #[error("signature verification failed")] + Verify { + #[source] + inner: signature::Error, + }, +} + +impl JwtVerificationError { + #[allow(clippy::needless_pass_by_value)] + fn parse_signature(_inner: E) -> Self { + Self::ParseSignature + } + + fn verify(inner: signature::Error) -> Self { + Self::Verify { inner } + } +} + +#[derive(Debug, Error, Default)] +#[error("none of the keys worked")] +pub struct NoKeyWorked { + _inner: (), +} + +impl<'a, T> Jwt<'a, T> { + /// Get the JWT header + pub fn header(&self) -> &JsonWebSignatureHeader { + &self.header + } + + /// Get the JWT payload + pub fn payload(&self) -> &T { + &self.payload + } + + pub fn into_owned(self) -> Jwt<'static, T> { + Jwt { + raw: self.raw.into_owned(), + header: self.header, + payload: self.payload, + signature: self.signature, + } + } + + /// Verify the signature of this JWT using the given key. + /// + /// # Errors + /// + /// Returns an error if the signature is invalid. + pub fn verify(&self, key: &K) -> Result<(), JwtVerificationError> + where + K: Verifier, + S: SignatureEncoding, + { + let signature = + S::try_from(&self.signature).map_err(JwtVerificationError::parse_signature)?; + + key.verify(self.raw.signed_part().as_bytes(), &signature) + .map_err(JwtVerificationError::verify) + } + + /// Verify the signature of this JWT using the given symmetric key. 
+ /// + /// # Errors + /// + /// Returns an error if the signature is invalid or if the algorithm is not + /// supported. + pub fn verify_with_shared_secret(&self, secret: Vec) -> Result<(), NoKeyWorked> { + let verifier = crate::jwa::SymmetricKey::new_for_alg(secret, self.header().alg()) + .map_err(|_| NoKeyWorked::default())?; + + self.verify(&verifier).map_err(|_| NoKeyWorked::default())?; + + Ok(()) + } + + /// Verify the signature of this JWT using the given JWKS. + /// + /// # Errors + /// + /// Returns an error if the signature is invalid, if no key matches the + /// constraints, or if the algorithm is not supported. + pub fn verify_with_jwks(&self, jwks: &PublicJsonWebKeySet) -> Result<(), NoKeyWorked> { + let constraints = ConstraintSet::from(self.header()); + let candidates = constraints.filter(&**jwks); + + for candidate in candidates { + let Ok(key) = crate::jwa::AsymmetricVerifyingKey::from_jwk_and_alg( + candidate.params(), + self.header().alg(), + ) else { + continue; + }; + + if self.verify(&key).is_ok() { + return Ok(()); + } + } + + Err(NoKeyWorked::default()) + } + + /// Get the raw JWT string as a borrowed [`str`] + pub fn as_str(&'a self) -> &'a str { + &self.raw + } + + /// Get the raw JWT string as an owned [`String`] + pub fn into_string(self) -> String { + self.raw.into() + } + + /// Split the JWT into its parts (header and payload). 
+ pub fn into_parts(self) -> (JsonWebSignatureHeader, T) { + (self.header, self.payload) + } +} + +#[derive(Debug, Error)] +pub enum JwtSignatureError { + #[error("failed to serialize header")] + EncodeHeader { + #[source] + inner: serde_json::Error, + }, + + #[error("failed to serialize payload")] + EncodePayload { + #[source] + inner: serde_json::Error, + }, + + #[error("failed to sign")] + Signature { + #[from] + inner: signature::Error, + }, +} + +impl JwtSignatureError { + fn encode_header(inner: serde_json::Error) -> Self { + Self::EncodeHeader { inner } + } + + fn encode_payload(inner: serde_json::Error) -> Self { + Self::EncodePayload { inner } + } +} + +impl Jwt<'static, T> { + /// Sign the given payload with the given key. + /// + /// # Errors + /// + /// Returns an error if the payload could not be serialized or if the key + /// could not sign the payload. + pub fn sign( + header: JsonWebSignatureHeader, + payload: T, + key: &K, + ) -> Result + where + K: RandomizedSigner, + S: SignatureEncoding, + T: Serialize, + { + #[allow(clippy::disallowed_methods)] + Self::sign_with_rng(&mut thread_rng(), header, payload, key) + } + + /// Sign the given payload with the given key using the given RNG. + /// + /// # Errors + /// + /// Returns an error if the payload could not be serialized or if the key + /// could not sign the payload. 
+ pub fn sign_with_rng( + rng: &mut R, + header: JsonWebSignatureHeader, + payload: T, + key: &K, + ) -> Result + where + R: CryptoRngCore, + K: RandomizedSigner, + S: SignatureEncoding, + T: Serialize, + { + let header_ = serde_json::to_vec(&header).map_err(JwtSignatureError::encode_header)?; + let header_ = Base64UrlUnpadded::encode_string(&header_); + + let payload_ = serde_json::to_vec(&payload).map_err(JwtSignatureError::encode_payload)?; + let payload_ = Base64UrlUnpadded::encode_string(&payload_); + + let mut inner = format!("{header_}.{payload_}"); + + let first_dot = header_.len(); + let second_dot = inner.len(); + + let signature = key.try_sign_with_rng(rng, inner.as_bytes())?.to_vec(); + let signature_ = Base64UrlUnpadded::encode_string(&signature); + inner.reserve_exact(1 + signature_.len()); + inner.push('.'); + inner.push_str(&signature_); + + let raw = RawJwt::new(inner, first_dot, second_dot); + + Ok(Self { + raw, + header, + payload, + signature, + }) + } +} + +#[cfg(test)] +mod tests { + #![allow(clippy::disallowed_methods)] + use mas_iana::jose::JsonWebSignatureAlg; + use rand::thread_rng; + + use super::*; + + #[test] + fn test_jwt_decode() { + let jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c"; + let jwt: Jwt<'_, serde_json::Value> = Jwt::try_from(jwt).unwrap(); + assert_eq!(jwt.raw.header(), "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"); + assert_eq!( + jwt.raw.payload(), + "eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ" + ); + assert_eq!( + jwt.raw.signature(), + "SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + ); + assert_eq!( + jwt.raw.signed_part(), + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ" + ); + } + + #[test] + fn test_jwt_sign_and_verify() { + let header = JsonWebSignatureHeader::new(JsonWebSignatureAlg::Es256); + let payload = 
serde_json::json!({"hello": "world"}); + + let key = ecdsa::SigningKey::::random(&mut thread_rng()); + let signed = Jwt::sign::<_, ecdsa::Signature<_>>(header, payload, &key).unwrap(); + signed + .verify::<_, ecdsa::Signature<_>>(key.verifying_key()) + .unwrap(); + } +} diff --git a/matrix-authentication-service/crates/jose/src/lib.rs b/matrix-authentication-service/crates/jose/src/lib.rs new file mode 100644 index 00000000..3384d9f1 --- /dev/null +++ b/matrix-authentication-service/crates/jose/src/lib.rs @@ -0,0 +1,17 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(rustdoc::broken_intra_doc_links)] +#![allow(clippy::module_name_repetitions)] + +mod base64; +pub mod claims; +pub mod constraints; +pub mod jwa; +pub mod jwk; +pub mod jwt; + +pub use self::base64::Base64; diff --git a/matrix-authentication-service/crates/jose/tests/generate.py b/matrix-authentication-service/crates/jose/tests/generate.py new file mode 100644 index 00000000..e70f5725 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/generate.py @@ -0,0 +1,143 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +# Generates test keys, JWKS and JWTs +# Required the `openssl` binary and the `authlib` python library + +import json +import subprocess +from pathlib import Path +from typing import List + +from authlib.jose import JsonWebKey, JsonWebSignature, KeySet + +output_path = Path(__file__).parent + +keys_path = output_path / "keys" +keys_path.mkdir(parents=True, exist_ok=True) + +jwts_path = output_path / "jwts" +jwts_path.mkdir(parents=True, exist_ok=True) + + +def gen_key( + name: str, + priv_command: List[str], + pub_command: List[str], +): + """Generate a keypair + + Args: + name: Name + priv_command: Command to generate the private key. This must write the + key to stdout. + pub_command: Command to convert the private key to a public one. This + must read the private key from stdin and write the public key to + stdout. + """ + priv_path = keys_path / f"{name}.priv.pem" + pub_path = keys_path / f"{name}.pub.pem" + + with open(priv_path, "wb") as f: + subprocess.run(priv_command, stdout=f, stderr=subprocess.DEVNULL) + + with open(priv_path, "rb") as priv, open(pub_path, "wb") as pub: + subprocess.run(pub_command, stdin=priv, stdout=pub, stderr=subprocess.DEVNULL) + + +def import_key(name: str, kty: str) -> JsonWebKey: + """Import a key from a file""" + with open(keys_path / name, "r") as f: + pem = f.read() + return JsonWebKey.import_key(pem, {"kty": kty}) + + +def sign_jwt(alg: str, filename: str, key: JsonWebKey): + """Sign a JWT for the given key""" + path = jwts_path / filename + protected = {"alg": alg, "kid": key.thumbprint()} + payload = '{"hello":"world"}' + jws = JsonWebSignature(algorithms=[alg]) + jwt = jws.serialize_compact(protected, payload, key) + with open(path, "wb") as f: + f.write(jwt) + + +with open(keys_path / "oct.bin", "wb") as f: + subprocess.run( + ["openssl", "rand", "-hex", "64"], stdout=f, stderr=subprocess.DEVNULL + ) + +gen_key("rsa", ["openssl", "genrsa", "2048"], ["openssl", "rsa", "-pubout"]) +gen_key( + "p256", + ["openssl", 
"ecparam", "-genkey", "-name", "prime256v1"], + ["openssl", "ec", "-pubout"], +) +gen_key( + "p384", + ["openssl", "ecparam", "-genkey", "-name", "secp384r1"], + ["openssl", "ec", "-pubout"], +) +gen_key( + "p521", + ["openssl", "ecparam", "-genkey", "-name", "secp521r1"], + ["openssl", "ec", "-pubout"], +) +gen_key( + "k256", + ["openssl", "ecparam", "-genkey", "-name", "secp256k1"], + ["openssl", "ec", "-pubout"], +) +gen_key( + "ed25519", + ["openssl", "genpkey", "-algorithm", "ed25519"], + ["openssl", "pkey", "-pubout"], +) +gen_key( + "ed448", + ["openssl", "genpkey", "-algorithm", "ed448"], + ["openssl", "pkey", "-pubout"], +) + +oct_key = import_key("oct.bin", "oct") +rsa_key = import_key("rsa.priv.pem", "RSA") +p256_key = import_key("p256.priv.pem", "EC") +p384_key = import_key("p384.priv.pem", "EC") +p521_key = import_key("p521.priv.pem", "EC") +k256_key = import_key("k256.priv.pem", "EC") +ed25519_key = import_key("ed25519.priv.pem", "OKP") +ed448_key = import_key("ed448.priv.pem", "OKP") + +key_set = KeySet( + [rsa_key, p256_key, p384_key, p521_key, k256_key, ed25519_key, ed448_key] +) + +with open(keys_path / "jwks.pub.json", "w", encoding="utf8") as f: + json.dump(key_set.as_dict(is_private=False), f, indent=2, sort_keys=True) + +key_set.keys.insert(0, oct_key) + +with open(keys_path / "jwks.priv.json", "w", encoding="utf8") as f: + json.dump(key_set.as_dict(is_private=True), f, indent=2, sort_keys=True) + +sign_jwt("HS256", "hs256.jwt", oct_key) +sign_jwt("HS384", "hs384.jwt", oct_key) +sign_jwt("HS512", "hs512.jwt", oct_key) +sign_jwt("RS256", "rs256.jwt", rsa_key) +sign_jwt("RS384", "rs384.jwt", rsa_key) +sign_jwt("RS512", "rs512.jwt", rsa_key) +sign_jwt("RS256", "rs256.jwt", rsa_key) +sign_jwt("RS384", "rs384.jwt", rsa_key) +sign_jwt("RS512", "rs512.jwt", rsa_key) +sign_jwt("PS256", "ps256.jwt", rsa_key) +sign_jwt("PS384", "ps384.jwt", rsa_key) +sign_jwt("PS512", "ps512.jwt", rsa_key) +sign_jwt("ES256", "es256.jwt", p256_key) +sign_jwt("ES384", 
"es384.jwt", p384_key) +sign_jwt("ES512", "es512.jwt", p521_key) +sign_jwt("ES256K", "es256k.jwt", k256_key) +sign_jwt("EdDSA", "eddsa-ed25519.jwt", ed25519_key) +sign_jwt("EdDSA", "eddsa-ed448.jwt", ed448_key) diff --git a/matrix-authentication-service/crates/jose/tests/jws.rs b/matrix-authentication-service/crates/jose/tests/jws.rs new file mode 100644 index 00000000..d7068cda --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jws.rs @@ -0,0 +1,225 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +static HS256_JWT: &str = include_str!("./jwts/hs256.jwt"); +static HS384_JWT: &str = include_str!("./jwts/hs384.jwt"); +static HS512_JWT: &str = include_str!("./jwts/hs512.jwt"); +static RS256_JWT: &str = include_str!("./jwts/rs256.jwt"); +static RS384_JWT: &str = include_str!("./jwts/rs384.jwt"); +static RS512_JWT: &str = include_str!("./jwts/rs512.jwt"); +static PS256_JWT: &str = include_str!("./jwts/ps256.jwt"); +static PS384_JWT: &str = include_str!("./jwts/ps384.jwt"); +static PS512_JWT: &str = include_str!("./jwts/ps512.jwt"); +static ES256_JWT: &str = include_str!("./jwts/es256.jwt"); +static ES384_JWT: &str = include_str!("./jwts/es384.jwt"); +static ES512_JWT: &str = include_str!("./jwts/es512.jwt"); +static ES256K_JWT: &str = include_str!("./jwts/es256k.jwt"); +static EDDSA_ED25519_JWT: &str = include_str!("./jwts/eddsa-ed25519.jwt"); +static EDDSA_ED448_JWT: &str = include_str!("./jwts/eddsa-ed448.jwt"); +static OCT_KEY: &[u8] = include_bytes!("./keys/oct.bin"); + +fn public_jwks() -> mas_jose::jwk::PublicJsonWebKeySet { + serde_json::from_str(include_str!("./keys/jwks.pub.json")).unwrap() +} + +fn private_jwks() -> mas_jose::jwk::PrivateJsonWebKeySet { + serde_json::from_str(include_str!("./keys/jwks.priv.json")).unwrap() +} + +fn oct_key() 
-> Vec { + OCT_KEY.to_vec() +} + +#[derive(serde::Deserialize, serde::Serialize)] +struct Payload { + hello: String, +} + +macro_rules! conditional { + { true => $($tt:tt)* } => { + $($tt)* + }; + { false => $($tt:tt)* } => {}; +} + +macro_rules! asymetric_jwt_test { + ($test_name:ident, $alg:ident, $jwt:ident) => { + asymetric_jwt_test!($test_name, $alg, $jwt, supported = true); + }; + ($test_name:ident, $alg:ident, $jwt:ident, supported = $supported:ident) => { + mod $test_name { + use std::ops::Deref; + + use mas_iana::jose::JsonWebSignatureAlg; + use mas_jose::{constraints::ConstraintSet, jwt::Jwt}; + + use super::*; + + #[test] + fn validate_jwt() { + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + assert_eq!(jwt.payload().hello, "world"); + assert_eq!(*jwt.header().alg(), JsonWebSignatureAlg::$alg); + } + + #[test] + fn find_public_key() { + let jwks = public_jwks(); + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + + let constraints = ConstraintSet::from(jwt.header()); + let candidates = constraints.filter(jwks.deref()); + assert_eq!(candidates.len(), 1); + } + + #[test] + fn find_private_key() { + let jwks = private_jwks(); + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + + let constraints = ConstraintSet::from(jwt.header()); + let candidates = constraints.filter(jwks.deref()); + assert_eq!(candidates.len(), 1); + } + + conditional! 
{ $supported => + use mas_jose::jwt::JsonWebSignatureHeader; + use rand_chacha::ChaCha8Rng; + use rand::SeedableRng; + + #[test] + fn verify_jwt() { + let jwks = public_jwks(); + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + + let key = jwks.find_key(&jwt.header().into()).unwrap(); + + let key = mas_jose::jwa::AsymmetricVerifyingKey::from_jwk_and_alg( + key.params(), + &JsonWebSignatureAlg::$alg, + ) + .unwrap(); + + jwt.verify(&key).unwrap(); + } + + #[test] + fn sign_jwt() { + let mut rng = ChaCha8Rng::seed_from_u64(42); + let alg = JsonWebSignatureAlg::$alg; + let payload = Payload { + hello: "world".to_owned(), + }; + let header = JsonWebSignatureHeader::new(alg.clone()); + + let jwks = private_jwks(); + let key = jwks.signing_key_for_algorithm(&alg).unwrap(); + + let key = mas_jose::jwa::AsymmetricSigningKey::from_jwk_and_alg(key.params(), &alg) + .unwrap(); + + let jwt: Jwt<'_, Payload> = Jwt::sign_with_rng(&mut rng, header, payload, &key).unwrap(); + insta::assert_snapshot!(jwt.as_str()); + } + + #[test] + fn sign_and_verify_jwt() { + let alg = JsonWebSignatureAlg::$alg; + let payload = Payload { + hello: "world".to_owned(), + }; + let header = JsonWebSignatureHeader::new(alg.clone()); + + let jwks = private_jwks(); + let key = jwks.signing_key_for_algorithm(&alg).unwrap(); + + let key = mas_jose::jwa::AsymmetricSigningKey::from_jwk_and_alg(key.params(), &alg) + .unwrap(); + + let jwt: Jwt<'_, Payload> = Jwt::sign(header, payload, &key).unwrap(); + let jwt: Jwt<'_, Payload> = Jwt::try_from(jwt.as_str()).unwrap(); + + let jwks = public_jwks(); + let key = jwks.find_key(&jwt.header().into()).unwrap(); + + let key = + mas_jose::jwa::AsymmetricVerifyingKey::from_jwk_and_alg(key.params(), &alg) + .unwrap(); + + jwt.verify(&key).unwrap(); + } + } + } + }; +} + +macro_rules! 
symetric_jwt_test { + ($test_name:ident, $alg:ident, $jwt:ident) => { + mod $test_name { + use mas_iana::jose::JsonWebSignatureAlg; + use mas_jose::jwt::{JsonWebSignatureHeader, Jwt}; + + use super::*; + + #[test] + fn validate_jwt() { + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + assert_eq!(jwt.payload().hello, "world"); + assert_eq!(*jwt.header().alg(), JsonWebSignatureAlg::$alg); + } + + #[test] + fn verify_jwt() { + let jwt: Jwt<'_, Payload> = Jwt::try_from($jwt).unwrap(); + let key = + mas_jose::jwa::SymmetricKey::new_for_alg(oct_key(), &JsonWebSignatureAlg::$alg) + .unwrap(); + jwt.verify(&key).unwrap(); + } + + #[test] + fn sign_and_verify_jwt() { + let alg = JsonWebSignatureAlg::$alg; + let payload = Payload { + hello: "world".to_owned(), + }; + let header = JsonWebSignatureHeader::new(alg.clone()); + + let key = mas_jose::jwa::SymmetricKey::new_for_alg(oct_key(), &alg).unwrap(); + + let jwt: Jwt<'_, Payload> = Jwt::sign(header, payload, &key).unwrap(); + let jwt: Jwt<'_, Payload> = Jwt::try_from(jwt.as_str()).unwrap(); + + jwt.verify(&key).unwrap(); + } + } + }; +} + +symetric_jwt_test!(hs256, Hs256, HS256_JWT); +symetric_jwt_test!(hs384, Hs384, HS384_JWT); +symetric_jwt_test!(hs512, Hs512, HS512_JWT); + +asymetric_jwt_test!(rs256, Rs256, RS256_JWT); +asymetric_jwt_test!(rs384, Rs384, RS384_JWT); +asymetric_jwt_test!(rs512, Rs512, RS512_JWT); +asymetric_jwt_test!(ps256, Ps256, PS256_JWT); +asymetric_jwt_test!(ps384, Ps384, PS384_JWT); +asymetric_jwt_test!(ps512, Ps512, PS512_JWT); +asymetric_jwt_test!(es256, Es256, ES256_JWT); +asymetric_jwt_test!(es384, Es384, ES384_JWT); +asymetric_jwt_test!(es512, Es512, ES512_JWT, supported = false); +asymetric_jwt_test!(es256k, Es256K, ES256K_JWT); +asymetric_jwt_test!(eddsa_ed25519, EdDsa, EDDSA_ED25519_JWT, supported = false); +asymetric_jwt_test!(eddsa_ed448, EdDsa, EDDSA_ED448_JWT, supported = false); + +#[test] +fn test_private_to_public_jwks() { + let priv_jwks = private_jwks(); + let pub_jwks = 
mas_jose::jwk::PublicJsonWebKeySet::from(priv_jwks); + + assert_eq!(pub_jwks, public_jwks()); +} diff --git a/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed25519.jwt b/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed25519.jwt new file mode 100644 index 00000000..4d7793e1 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed25519.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFZERTQSIsImtpZCI6ImlYa2l5aEVoNkU3VS1hWDBmZzd3LWVzSFdxUHZ2eFdkNmdIMUpHMnU3TjAifQ.eyJoZWxsbyI6IndvcmxkIn0.ZFiNWsheqUC_mQNztHpZXLnyb5LtvyT1dTGcMSCgG97Cobju83xCIkbJwfjOSgZrI2CpEVobVM_mfnmFIAUfBg \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed448.jwt b/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed448.jwt new file mode 100644 index 00000000..e7e36b04 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/eddsa-ed448.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFZERTQSIsImtpZCI6IlFsdGEycVZsaEhoZzNqcmlKcDBIc0lCUXFHVkIxWkgycEVueVBIemwxTXMifQ.eyJoZWxsbyI6IndvcmxkIn0.7EqBc73c8UjbZnW5LkkDmPlAnlgjVdDzfABvssoLE3FoFX3uUr1dPdX3I9Hu_rtOIdRtTLfN9eeABuG5cugUoshrYSFuHF6vy2Nim7uM3GWa6mVZx6fzOBq6goCK4JpNfwkJ3a4VyslHU7wQBfXAOxcA \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/es256.jwt b/matrix-authentication-service/crates/jose/tests/jwts/es256.jwt new file mode 100644 index 00000000..169b0676 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/es256.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFUzI1NiIsImtpZCI6ImxNYlNJNjlhanNCSEhrSXBWQUZLUktZblI2NmtHZEd0ZWcyb3FNenAwX0UifQ.eyJoZWxsbyI6IndvcmxkIn0.YckCGhpak2hpO9EiR-X2MD6CVBnUAmQbRVKvKoYCbRnydOOksNlzWaOl0S-C4KZxGTuKG-spzFQJov5h_ob5nw \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/es256k.jwt b/matrix-authentication-service/crates/jose/tests/jwts/es256k.jwt new file mode 100644 index 00000000..cf20eedb --- /dev/null +++ 
b/matrix-authentication-service/crates/jose/tests/jwts/es256k.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFUzI1NksiLCJraWQiOiJuOWI0Z3lkNU5nSHY3cEo3UzI3QUtCcmhCUEhhM0g1cHRjaXhtWWVyU1VnIn0.eyJoZWxsbyI6IndvcmxkIn0.e0XIMec0_gvlxS8je5hVpYQGls2A5r2TUJ9eJNmdwZQbo1alRB93dgbh3yd4fh8bDOmmLhRfMKti93c7-ljPVg \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/es384.jwt b/matrix-authentication-service/crates/jose/tests/jwts/es384.jwt new file mode 100644 index 00000000..6a23aafa --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/es384.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFUzM4NCIsImtpZCI6IkoxRVpKR1AxTHloWDJabHo4eFBjc3BNUUVsOFczYllHMngzTnFpTWJQeVkifQ.eyJoZWxsbyI6IndvcmxkIn0.XK3AIs0TQ1r5Wbpd14MkVIp3rvisQEb_8wlp3F4usveL23GH15y5TQ8mcU5NrxNFFylclwikyz4ozM2zmU7fkCYfjKD8AoEABOTlfjH3DRQnynVcpkvB47CsSgt8QpGe \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/es512.jwt b/matrix-authentication-service/crates/jose/tests/jwts/es512.jwt new file mode 100644 index 00000000..8b70bb84 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/es512.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFUzUxMiIsImtpZCI6Il94R3lJM21zOTBBdmdGNjU4d3o5NzF3c3dTeVluR1NHX0EwZEFnbXJBTTAifQ.eyJoZWxsbyI6IndvcmxkIn0.AJ9YcP56d-1Z1wsZL0ikFRY_4Q6du7YEWsqtQDOloCLMYQ-3citw6Fm35t4kg8E5aoe8QrEj8kTqsQLloWv0eBMFAWh-Uyrupmz0Kzllc6xbOEVoWuM5DWc6AJ6Da6k0f6XHsZ_MVcayQpdmZTLcM_pyo1U6olqwLYqv1YNx-2M2GdCl \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/hs256.jwt b/matrix-authentication-service/crates/jose/tests/jwts/hs256.jwt new file mode 100644 index 00000000..ff2c13be --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/hs256.jwt @@ -0,0 +1 @@ +eyJhbGciOiJIUzI1NiIsImtpZCI6ImRqSEtvV1Uzck9sV2c1RTBFSV80RmxiRVRmZDRPRlFnVjk4REZYRW1HZmcifQ.eyJoZWxsbyI6IndvcmxkIn0.GBxkJdc15D26siv1Ov_a2jgQSIsgLwiF2ZDFSUdzoFY \ No newline at end of file diff --git 
a/matrix-authentication-service/crates/jose/tests/jwts/hs384.jwt b/matrix-authentication-service/crates/jose/tests/jwts/hs384.jwt new file mode 100644 index 00000000..a6c79979 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/hs384.jwt @@ -0,0 +1 @@ +eyJhbGciOiJIUzM4NCIsImtpZCI6ImRqSEtvV1Uzck9sV2c1RTBFSV80RmxiRVRmZDRPRlFnVjk4REZYRW1HZmcifQ.eyJoZWxsbyI6IndvcmxkIn0.pOZkiI4HMCNHgUf9diq6CkFxsMIMCNADvDPHmtkjerSYWy16dmlZy-FT9ZxyyD_1 \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/hs512.jwt b/matrix-authentication-service/crates/jose/tests/jwts/hs512.jwt new file mode 100644 index 00000000..bebbb1a3 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/hs512.jwt @@ -0,0 +1 @@ +eyJhbGciOiJIUzUxMiIsImtpZCI6ImRqSEtvV1Uzck9sV2c1RTBFSV80RmxiRVRmZDRPRlFnVjk4REZYRW1HZmcifQ.eyJoZWxsbyI6IndvcmxkIn0.1kVwcE7LajF4Ph3yl2cKhJRs4FtZUMT6mxVCbtfttLPLqkxX-WAlZ0Hd7zg1JAzxNUmkeF8bsgZ9P0bPxBDSyw \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/ps256.jwt b/matrix-authentication-service/crates/jose/tests/jwts/ps256.jwt new file mode 100644 index 00000000..c97ee9d3 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/ps256.jwt @@ -0,0 +1 @@ +eyJhbGciOiJQUzI1NiIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.JWY1HZhLrDngEV7-V7to489hsX3muDOeCr4cedGUY2cpDNgJs0CgTe1pknXws9msZSlG4C-oA08UqgousBA2FWbcuVDhSEmSyNWM2rHekFuYcLlAupP8wucMQ3yzP425V2PzlgWV85xRe18PifNaTldMHLArbTKplMQgHHHopz28kuP1Uko99lHxpZrDVMHSLXNTyYaoQeOd81Hbx8uSx5wZO6tVIErV1RhKhSFGLP9DsbOKKW6jRgam_tKNh35VYBQZ6CIQkgsZCruDP7KFHHqC4xHTbkNQ6VlxHHHOpHz-SuRcBS901EN6NVCSPRSc0oYp1ChQCPgUeH_SrloCMg \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/ps384.jwt b/matrix-authentication-service/crates/jose/tests/jwts/ps384.jwt new file mode 100644 index 00000000..adf3723e --- /dev/null +++ 
b/matrix-authentication-service/crates/jose/tests/jwts/ps384.jwt @@ -0,0 +1 @@ +eyJhbGciOiJQUzM4NCIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.XLe8Fxg1wALfGIYBtGtYCSxneiReNMRsUiXukYPS3KWvIH6xcLV93GflNRBHRE1aijy1GPnqZv-mZoKjfZr4PoZMX0MalE0j0bFqrLJvfoyxlZLTIzjfyYm81JtPwlB3iU3DvqKGAYBE8aknTOnv65nyprdhGuJhFEW-_7omDzXqE03DofIGQOu-F3nkVP5Om28VKY6Vdr7PswJhKawP97VXrhN5aIubSjldv5-LcKlVwjV9_3RTiEbVGCgluyhzUUhoa-y0Y1oplJC4GMzvQ1YCYQeYJOn0bB1FjpOryJ2mxlIf8qNzlDHnpyr5MVRJ2PAlhZ31GB5JGr_ZQYTRUA \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/ps512.jwt b/matrix-authentication-service/crates/jose/tests/jwts/ps512.jwt new file mode 100644 index 00000000..43fde27c --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/ps512.jwt @@ -0,0 +1 @@ +eyJhbGciOiJQUzUxMiIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.aQ6sXJsU-U14WW7cD344aZi72Hf_XNq9LBi0_feKRVQOO98gV-jlBKWer-n_FI1qLcOUoHfitOciTOVLgvYxeJUwePCwUm9JhQ186CLAc6i_AhqpeKRlDkVOF_viQeZTFwEadHT2KMIe0ImZnPqGUb07arUdzGO67Lwsts2ob7qgG_uWgVbjXMkTUwt-JSHdXUcGIz1FgCJaFgGygfQE_I_doNiApWr2okiuIMs_4Q5BfxIlvPR-uaOcpqxk7ldukvQgUjv4rTfOGE12fCx5eLDVF4P1OXgMjgmcXH1yaV89DgTBgDPP11tQrbsFbANX004VLF9MQWoVF6esl6xwQw \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/rs256.jwt b/matrix-authentication-service/crates/jose/tests/jwts/rs256.jwt new file mode 100644 index 00000000..136e3e4d --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/rs256.jwt @@ -0,0 +1 @@ 
+eyJhbGciOiJSUzI1NiIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.JLzSM5NDbAIb5vpbnKJeHUgU-uJ46616qzDjWXRbIAdxPk8WUqpRDRTlPoRUBXsAKn7E14r_CZmwvGAgJipS7EY0PbJYOkA_6oi8sYWykMUT1F2BlqKQGv2BvRR0LGu0tmm4XYZT2nOLRiEa4bs9l-D2jA5GRTKjDnmgUBHXpX4vIICtnkHHvZilMf1Fjsdm-3X9NFmxjtvQChg-w0h6hM3NZAt6Gd5AG8MaFf-mj3sLa40c51XXz1J1WE9iOWF8lGC6EfP5MSWunKnhyHf2xPQiH4C_Tvm529p2EiEBjjoL1f2A8WH8EYruHF8AXsz2F8HxN_7ryGmjrqLGwuw7iQ \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/rs384.jwt b/matrix-authentication-service/crates/jose/tests/jwts/rs384.jwt new file mode 100644 index 00000000..efb5c6b2 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/rs384.jwt @@ -0,0 +1 @@ +eyJhbGciOiJSUzM4NCIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.Qf9DbTCoisWvPvGYahn-dMe9r-escYv5cTL-5X2tz5uPRUmAEJ6D6cn0VtLCCPmTIuzSYDzeMqdEx1Is-AVkzvWMKdFRXNVL_E54bhS6Dg04a75bL4YGQOg8iaTTdRlHMaLLfClf8sXttpHmnOFhQ9C6pLcmtT5cfle8qrAw9x7Ivri7jkcjydWcR2WKsYHJxEWDwdhDiBK461F2fi9YtbZOL4qdKEoYpg08v4jH7hFf5G60W_k2oKvPQnbVJe0VcnGcEXvItMAEi8omMn3_OxIGNH-mxBf9DOpOu8Vj-kvvWuE03f31goWLqiL6-Eq8ykqqFZ3sKb23WfGPd26pDw \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/jwts/rs512.jwt b/matrix-authentication-service/crates/jose/tests/jwts/rs512.jwt new file mode 100644 index 00000000..f03fb845 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/jwts/rs512.jwt @@ -0,0 +1 @@ 
+eyJhbGciOiJSUzUxMiIsImtpZCI6ImxqQXdGc1czMmV4cHlBMFJqcktvT0h1WnhmazdLTFNlajh6bGRPOXo0aVUifQ.eyJoZWxsbyI6IndvcmxkIn0.UevGIlEIlrQWvLLm3Iouq6cxjWf7CtFwaDXQOUEQzdQxa3Mg9H0KD7Ztc1LRS36RFd0rnh9dWsXmeDbQ9yWNepnRvv0QP8Vxq3ty7wOHZtLn2kG1SjDQqgaU743p4n-YUpVugzSha0RHTiRN1TU4zufpx26jQBuO7ihOFof6trc8E2UG98Pgd1w3kv20Glwo_cWauhAefgDRhS-sOaH_SsOFWSBNCa8ISeIOiuKLFOEp2o1m2sla0yCDHVptERYDp3D_LHTLX-BP0dyaxpKwfQ7EuECGK1r7_yyiSq_pOwPrainC3lBKYovOgj8tYGTJxfw4Au_QSY57J96M7N4TmA \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/keys/ed25519.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/ed25519.priv.pem new file mode 100644 index 00000000..c3e3badf --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/ed25519.priv.pem @@ -0,0 +1,3 @@ +-----BEGIN PRIVATE KEY----- +MC4CAQAwBQYDK2VwBCIEIIutDmtMjMBKXN/Oxmfvxw3cNwtqgcyR2awtQYH/OS/5 +-----END PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/ed25519.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/ed25519.pub.pem new file mode 100644 index 00000000..26e2dc92 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/ed25519.pub.pem @@ -0,0 +1,3 @@ +-----BEGIN PUBLIC KEY----- +MCowBQYDK2VwAyEAnVo63sClAQ8qwBAZW0tttHFhXdrLiKqJnFeJ+j3nA3U= +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/ed448.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/ed448.priv.pem new file mode 100644 index 00000000..269ce375 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/ed448.priv.pem @@ -0,0 +1,4 @@ +-----BEGIN PRIVATE KEY----- +MEcCAQAwBQYDK2VxBDsEOdwdrXdIIxmkz/6pi3/JeOemGYvMECA+CvW5CAGXCvwi +VXFdnXxUt22BpU8Hl1jl1+kuGe3Mx5Pt3w== +-----END PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/ed448.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/ed448.pub.pem new file mode 100644 index 00000000..bf447516 --- 
/dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/ed448.pub.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MEMwBQYDK2VxAzoAKgaYHB+xIpPPvmH2PdbnWT+67/CfJhuD3U90sv+i5CZmGdwt +WOErsowzNYSvuFWk8vztPOERjn4A +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/jwks.priv.json b/matrix-authentication-service/crates/jose/tests/keys/jwks.priv.json new file mode 100644 index 00000000..0dcaf8c2 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/jwks.priv.json @@ -0,0 +1,67 @@ +{ + "keys": [ + { + "k": "MjNlYWE0MzdjMmFjZTA0ZTM1YTNhNzdiODEzMjEwMGYzNDM4YWE2ODk0NmEzMDU1M2NiZjhmZTgwZTJhNWYzY2E3MDA4MjA1M2Y0YmQ0NDU3NjUzYmJiMTU4NzdkODQ3YzIzMjk0OGVhNzUxMmNjZDY3OWY3MWMyNjhmMWRkMDgK", + "kid": "djHKoWU3rOlWg5E0EI_4FlbETfd4OFQgV98DFXEmGfg", + "kty": "oct" + }, + { + "d": "PvWpVR1APf5Yxf0ZO20wx7jQXOFusJ5nijKAzyWAPFNx_pjoXD82BTaM0h7odiUGP8Bhc8VqctAQudQ_eEga-YXdpbKXL5wHZG0QzEQlZ_MT8tP_S4z_3rWoUyT9S1rPqWUuQvnb-YsfMWR3RuLGpS4a-3f_zUKbUT_RA5aJUWOMSMSOqMCuh4wdhK7GJKOQlWr8yXF6STEOx7nYxWpIxRdcJK3WinBXPZ5B88wgdDpcR0Yv32u-f7h4R5GHmjPPC2NXsWPpNMe8G6CbDQapgk_1mYViq6zpo4NgkrvBUsMRhFkI__Wlcfz12Ds9dBGxg1R-T7lIPY6wIZKUKLYVQQ", + "dp": "PlzIdqVNjdf5bJaRhW9-d3kF-sQJVV1x2prqRKr-FJK79M39fCmrlhsnyxVc7iqt6gSb3Mz_N1rsCbdn1TYUq6RFed3gJO2Bl57_K8L1N7LKjIxm2TpjLTtrMyxwQx72igxdjdvQytN1l6nmoAI76tv4dsw02dyQIsJ3BR7JiGE", + "dq": "rYGSaiGBnWhIJfI3ajRPy5Icuq7dO3pCgdoDPTMPgib-tHxegjx-v4frCq5Uqc9s94FsgR5_FsY3sttmMrYA7MCqGT3n2Peo718DharXhaScWabONJ8wAt2LPbXh74D3931F3hU9cKLWWg9L6Vckh5QPNgSu2ICEH6f0BxCiTUE", + "e": "AQAB", + "kid": "ljAwFsW32expyA0RjrKoOHuZxfk7KLSej8zldO9z4iU", + "kty": "RSA", + "n": "qNYHjNfiXl2SPu7NYSkd5RiF45bo4c_WW4K1NwH-iBwOOb970RvwlcyhctsvrtrUAJ046Z-3LgW27MR73NdcBP6z756XWIQ6CV6XlowG9NlgnEmOolh3XujZuNig-_05anzhTJr6Xl7uxh8o61VQgBjOgDma7cnEJNz2s89nu9f-WOMNage63O02ecA17TsZjU2jYcOCnV5UhsIyVRUcB3S2Jtk8FtRGBIydbkFHCX61atyh8GjzXYpneVPxTm1fRr-qUvGgNJqvK2HhuOzP6wRpcyKS6cl47I9Mu4L36pavtB_WOTXNhtduSYbUvMnkifAGuYTJHpT2e5QzSm2p7Q", + "p": 
"1djfJgLXpMaNuUaXd7V4bnkN38XC1Of8dubiGO5bFAym1qnExUQmJSq6Uubs3bRLSaa1i1EQqvIB_bBLm6we4m_jQbuNl8-m6dODaZQ2XVWmMWK84zDsRyJL2U-qDbSwt346AI3b3p-rtWlQLftx3qwj0-neAU4Jopfdz6R4HN0", + "q": "yh3QLx9gsrVF3crgejhJiEf4DLiGAglGAJpekeSllPhXWGF_0hw3YurhTm9swLumHhj0MvDNjVHz_fm-W5uLvkO0CW2hfoOtG3cj3AbnVu34v323EK27wTsp3ADMmykKk7Vc7rRtKUNuTawL-rL7ImvRhgiA_1La0NKkIk_yKFE", + "qi": "asGs7u5s2MXkwm0DHcyC9290wmlufSFOUrBgG-o8iahLFnYl9fCazTZJtBrSNQdfSPzr_htSSDav082q1Khaj1dCl_Jn84wuW2zTXgKhWbIvnCHM0GqgWGJ2HKHW3MKhGgitO5xlkv0nuv8znrkJtkQdxdw8x22eSNHUIPh_NtU" + }, + { + "crv": "P-256", + "d": "V9_Lc6jc0hyqFuqSHVaMzXcmWMj34Ib3pG5vls3EbaM", + "kid": "lMbSI69ajsBHHkIpVAFKRKYnR66kGdGteg2oqMzp0_E", + "kty": "EC", + "x": "4rnALl_X1zeOJtDmxz-YiUR1-9QGBfRE90qy_rqe0N0", + "y": "qGl3Telg02usgXK9jQTwcNQRLLovo07vffwaaZ3Dc5o" + }, + { + "crv": "P-384", + "d": "qLmbibr7wHPvUolsAC6I2rD_a78oCA3SvNiOh4C_WvZmtVJtIBDeQ3IGhz8q5lF0", + "kid": "J1EZJGP1LyhX2Zlz8xPcspMQEl8W3bYG2x3NqiMbPyY", + "kty": "EC", + "x": "dT7_3-Wp3kgewAiAyoarKQ2_rL4dketqaUti8nHOIT9K0dGtMGDt5W9uThc4mALN", + "y": "LHdC5G42RYTZcdnnEMftzcx3DOOaeTErrcdMpVTdu0gdjAQDrDwtHGu73E3AeHGp" + }, + { + "crv": "P-521", + "d": "Ae6RJFt67feYXDBIsWEv32WL7MsuiNJgO3A7WEyidaHN1-CP5nIjFN5urf8MvD8fBWqaxdwrAEIA6uPfc8f2U1J8", + "kid": "_xGyI3ms90AvgF658wz971wswSyYnGSG_A0dAgmrAM0", + "kty": "EC", + "x": "ARv-BW1gxTWyey_wil6Sc2iaPuu_iBsEKji2B8UBgW-vIp-JSHzTut1dR1UpkFnZe53EkS6P5kTNm5cBA-r9282J", + "y": "AeVtvHkJslwuPji-M71Zp3DzOostp_keWMB0f1zljl0P1CVhcpC5x4T_D3nEO_zRduL1R0Fv5gE6zaLm8X4cZD5m" + }, + { + "crv": "secp256k1", + "d": "_BuT_AckwrIi8AJpUTkqbxTMaViLXK5z0oIePWf9kkg", + "kid": "n9b4gyd5NgHv7pJ7S27AKBrhBPHa3H5ptcixmYerSUg", + "kty": "EC", + "x": "t-mBmz-Rvh0n3W_bRL_TSOc3Vv0ZB0oGaPZBEqu4sTQ", + "y": "PLm8asqUtHw5gVZN09vA5giJkDIPDOZ4zaG7NR77qSg" + }, + { + "crv": "Ed25519", + "d": "i60Oa0yMwEpc387GZ-_HDdw3C2qBzJHZrC1Bgf85L_k", + "kid": "iXkiyhEh6E7U-aX0fg7w-esHWqPvvxWd6gH1JG2u7N0", + "kty": "OKP", + "x": "nVo63sClAQ8qwBAZW0tttHFhXdrLiKqJnFeJ-j3nA3U" + }, + { + 
"crv": "Ed448", + "d": "3B2td0gjGaTP_qmLf8l456YZi8wQID4K9bkIAZcK_CJVcV2dfFS3bYGlTweXWOXX6S4Z7czHk-3f", + "kid": "Qlta2qVlhHhg3jriJp0HsIBQqGVB1ZH2pEnyPHzl1Ms", + "kty": "OKP", + "x": "KgaYHB-xIpPPvmH2PdbnWT-67_CfJhuD3U90sv-i5CZmGdwtWOErsowzNYSvuFWk8vztPOERjn4A" + } + ] +} \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/keys/jwks.pub.json b/matrix-authentication-service/crates/jose/tests/keys/jwks.pub.json new file mode 100644 index 00000000..60a85e2c --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/jwks.pub.json @@ -0,0 +1,50 @@ +{ + "keys": [ + { + "e": "AQAB", + "kid": "ljAwFsW32expyA0RjrKoOHuZxfk7KLSej8zldO9z4iU", + "kty": "RSA", + "n": "qNYHjNfiXl2SPu7NYSkd5RiF45bo4c_WW4K1NwH-iBwOOb970RvwlcyhctsvrtrUAJ046Z-3LgW27MR73NdcBP6z756XWIQ6CV6XlowG9NlgnEmOolh3XujZuNig-_05anzhTJr6Xl7uxh8o61VQgBjOgDma7cnEJNz2s89nu9f-WOMNage63O02ecA17TsZjU2jYcOCnV5UhsIyVRUcB3S2Jtk8FtRGBIydbkFHCX61atyh8GjzXYpneVPxTm1fRr-qUvGgNJqvK2HhuOzP6wRpcyKS6cl47I9Mu4L36pavtB_WOTXNhtduSYbUvMnkifAGuYTJHpT2e5QzSm2p7Q" + }, + { + "crv": "P-256", + "kid": "lMbSI69ajsBHHkIpVAFKRKYnR66kGdGteg2oqMzp0_E", + "kty": "EC", + "x": "4rnALl_X1zeOJtDmxz-YiUR1-9QGBfRE90qy_rqe0N0", + "y": "qGl3Telg02usgXK9jQTwcNQRLLovo07vffwaaZ3Dc5o" + }, + { + "crv": "P-384", + "kid": "J1EZJGP1LyhX2Zlz8xPcspMQEl8W3bYG2x3NqiMbPyY", + "kty": "EC", + "x": "dT7_3-Wp3kgewAiAyoarKQ2_rL4dketqaUti8nHOIT9K0dGtMGDt5W9uThc4mALN", + "y": "LHdC5G42RYTZcdnnEMftzcx3DOOaeTErrcdMpVTdu0gdjAQDrDwtHGu73E3AeHGp" + }, + { + "crv": "P-521", + "kid": "_xGyI3ms90AvgF658wz971wswSyYnGSG_A0dAgmrAM0", + "kty": "EC", + "x": "ARv-BW1gxTWyey_wil6Sc2iaPuu_iBsEKji2B8UBgW-vIp-JSHzTut1dR1UpkFnZe53EkS6P5kTNm5cBA-r9282J", + "y": "AeVtvHkJslwuPji-M71Zp3DzOostp_keWMB0f1zljl0P1CVhcpC5x4T_D3nEO_zRduL1R0Fv5gE6zaLm8X4cZD5m" + }, + { + "crv": "secp256k1", + "kid": "n9b4gyd5NgHv7pJ7S27AKBrhBPHa3H5ptcixmYerSUg", + "kty": "EC", + "x": "t-mBmz-Rvh0n3W_bRL_TSOc3Vv0ZB0oGaPZBEqu4sTQ", + "y": 
"PLm8asqUtHw5gVZN09vA5giJkDIPDOZ4zaG7NR77qSg" + }, + { + "crv": "Ed25519", + "kid": "iXkiyhEh6E7U-aX0fg7w-esHWqPvvxWd6gH1JG2u7N0", + "kty": "OKP", + "x": "nVo63sClAQ8qwBAZW0tttHFhXdrLiKqJnFeJ-j3nA3U" + }, + { + "crv": "Ed448", + "kid": "Qlta2qVlhHhg3jriJp0HsIBQqGVB1ZH2pEnyPHzl1Ms", + "kty": "OKP", + "x": "KgaYHB-xIpPPvmH2PdbnWT-67_CfJhuD3U90sv-i5CZmGdwtWOErsowzNYSvuFWk8vztPOERjn4A" + } + ] +} \ No newline at end of file diff --git a/matrix-authentication-service/crates/jose/tests/keys/k256.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/k256.priv.pem new file mode 100644 index 00000000..ec6b8421 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/k256.priv.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BgUrgQQACg== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHQCAQEEIPwbk/wHJMKyIvACaVE5Km8UzGlYi1yuc9KCHj1n/ZJIoAcGBSuBBAAK +oUQDQgAEt+mBmz+Rvh0n3W/bRL/TSOc3Vv0ZB0oGaPZBEqu4sTQ8ubxqypS0fDmB +Vk3T28DmCImQMg8M5njNobs1HvupKA== +-----END EC PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/k256.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/k256.pub.pem new file mode 100644 index 00000000..9b6ee66e --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/k256.pub.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEt+mBmz+Rvh0n3W/bRL/TSOc3Vv0ZB0oG +aPZBEqu4sTQ8ubxqypS0fDmBVk3T28DmCImQMg8M5njNobs1HvupKA== +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/oct.bin b/matrix-authentication-service/crates/jose/tests/keys/oct.bin new file mode 100644 index 00000000..2d4fd5fb --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/oct.bin @@ -0,0 +1 @@ +23eaa437c2ace04e35a3a77b8132100f3438aa68946a30553cbf8fe80e2a5f3ca70082053f4bd4457653bbb15877d847c232948ea7512ccd679f71c268f1dd08 diff --git a/matrix-authentication-service/crates/jose/tests/keys/p256.priv.pem 
b/matrix-authentication-service/crates/jose/tests/keys/p256.priv.pem new file mode 100644 index 00000000..ab863128 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p256.priv.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIFffy3Oo3NIcqhbqkh1WjM13JljI9+CG96Rub5bNxG2joAoGCCqGSM49 +AwEHoUQDQgAE4rnALl/X1zeOJtDmxz+YiUR1+9QGBfRE90qy/rqe0N2oaXdN6WDT +a6yBcr2NBPBw1BEsui+jTu99/BppncNzmg== +-----END EC PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/p256.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/p256.pub.pem new file mode 100644 index 00000000..9d90ecb4 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p256.pub.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4rnALl/X1zeOJtDmxz+YiUR1+9QG +BfRE90qy/rqe0N2oaXdN6WDTa6yBcr2NBPBw1BEsui+jTu99/BppncNzmg== +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/p384.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/p384.priv.pem new file mode 100644 index 00000000..64102003 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p384.priv.pem @@ -0,0 +1,9 @@ +-----BEGIN EC PARAMETERS----- +BgUrgQQAIg== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDCouZuJuvvAc+9SiWwALojasP9rvygIDdK82I6HgL9a9ma1Um0gEN5D +cgaHPyrmUXSgBwYFK4EEACKhZANiAAR1Pv/f5aneSB7ACIDKhqspDb+svh2R62pp +S2Lycc4hP0rR0a0wYO3lb25OFziYAs0sd0LkbjZFhNlx2ecQx+3NzHcM45p5MSut +x0ylVN27SB2MBAOsPC0ca7vcTcB4cak= +-----END EC PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/p384.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/p384.pub.pem new file mode 100644 index 00000000..7257da82 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p384.pub.pem @@ -0,0 +1,5 @@ +-----BEGIN PUBLIC KEY----- 
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEdT7/3+Wp3kgewAiAyoarKQ2/rL4dketq +aUti8nHOIT9K0dGtMGDt5W9uThc4mALNLHdC5G42RYTZcdnnEMftzcx3DOOaeTEr +rcdMpVTdu0gdjAQDrDwtHGu73E3AeHGp +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/p521.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/p521.priv.pem new file mode 100644 index 00000000..0cba8282 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p521.priv.pem @@ -0,0 +1,10 @@ +-----BEGIN EC PARAMETERS----- +BgUrgQQAIw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MIHcAgEBBEIB7pEkW3rt95hcMEixYS/fZYvsyy6I0mA7cDtYTKJ1oc3X4I/mciMU +3m6t/wy8Px8FaprF3CsAQgDq499zx/ZTUnygBwYFK4EEACOhgYkDgYYABAEb/gVt +YMU1snsv8IpeknNomj7rv4gbBCo4tgfFAYFvryKfiUh807rdXUdVKZBZ2XudxJEu +j+ZEzZuXAQPq/dvNiQHlbbx5CbJcLj44vjO9Wadw8zqLLaf5HljAdH9c5Y5dD9Ql +YXKQuceE/w95xDv80Xbi9UdBb+YBOs2i5vF+HGQ+Zg== +-----END EC PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/p521.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/p521.pub.pem new file mode 100644 index 00000000..560380cd --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/p521.pub.pem @@ -0,0 +1,6 @@ +-----BEGIN PUBLIC KEY----- +MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQBG/4FbWDFNbJ7L/CKXpJzaJo+67+I +GwQqOLYHxQGBb68in4lIfNO63V1HVSmQWdl7ncSRLo/mRM2blwED6v3bzYkB5W28 +eQmyXC4+OL4zvVmncPM6iy2n+R5YwHR/XOWOXQ/UJWFykLnHhP8PecQ7/NF24vVH +QW/mATrNoubxfhxkPmY= +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/rsa.priv.pem b/matrix-authentication-service/crates/jose/tests/keys/rsa.priv.pem new file mode 100644 index 00000000..5f34f888 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/rsa.priv.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAqNYHjNfiXl2SPu7NYSkd5RiF45bo4c/WW4K1NwH+iBwOOb97 +0RvwlcyhctsvrtrUAJ046Z+3LgW27MR73NdcBP6z756XWIQ6CV6XlowG9NlgnEmO 
+olh3XujZuNig+/05anzhTJr6Xl7uxh8o61VQgBjOgDma7cnEJNz2s89nu9f+WOMN +age63O02ecA17TsZjU2jYcOCnV5UhsIyVRUcB3S2Jtk8FtRGBIydbkFHCX61atyh +8GjzXYpneVPxTm1fRr+qUvGgNJqvK2HhuOzP6wRpcyKS6cl47I9Mu4L36pavtB/W +OTXNhtduSYbUvMnkifAGuYTJHpT2e5QzSm2p7QIDAQABAoIBAD71qVUdQD3+WMX9 +GTttMMe40FzhbrCeZ4oygM8lgDxTcf6Y6Fw/NgU2jNIe6HYlBj/AYXPFanLQELnU +P3hIGvmF3aWyly+cB2RtEMxEJWfzE/LT/0uM/961qFMk/Utaz6llLkL52/mLHzFk +d0bixqUuGvt3/81Cm1E/0QOWiVFjjEjEjqjAroeMHYSuxiSjkJVq/MlxekkxDse5 +2MVqSMUXXCSt1opwVz2eQfPMIHQ6XEdGL99rvn+4eEeRh5ozzwtjV7Fj6TTHvBug +mw0GqYJP9ZmFYqus6aODYJK7wVLDEYRZCP/1pXH89dg7PXQRsYNUfk+5SD2OsCGS +lCi2FUECgYEA1djfJgLXpMaNuUaXd7V4bnkN38XC1Of8dubiGO5bFAym1qnExUQm +JSq6Uubs3bRLSaa1i1EQqvIB/bBLm6we4m/jQbuNl8+m6dODaZQ2XVWmMWK84zDs +RyJL2U+qDbSwt346AI3b3p+rtWlQLftx3qwj0+neAU4Jopfdz6R4HN0CgYEAyh3Q +Lx9gsrVF3crgejhJiEf4DLiGAglGAJpekeSllPhXWGF/0hw3YurhTm9swLumHhj0 +MvDNjVHz/fm+W5uLvkO0CW2hfoOtG3cj3AbnVu34v323EK27wTsp3ADMmykKk7Vc +7rRtKUNuTawL+rL7ImvRhgiA/1La0NKkIk/yKFECgYA+XMh2pU2N1/lslpGFb353 +eQX6xAlVXXHamupEqv4Ukrv0zf18KauWGyfLFVzuKq3qBJvczP83WuwJt2fVNhSr +pEV53eAk7YGXnv8rwvU3ssqMjGbZOmMtO2szLHBDHvaKDF2N29DK03WXqeagAjvq +2/h2zDTZ3JAiwncFHsmIYQKBgQCtgZJqIYGdaEgl8jdqNE/Lkhy6rt07ekKB2gM9 +Mw+CJv60fF6CPH6/h+sKrlSpz2z3gWyBHn8Wxjey22YytgDswKoZPefY96jvXwOF +qteFpJxZps40nzAC3Ys9teHvgPf3fUXeFT1wotZaD0vpVySHlA82BK7YgIQfp/QH +EKJNQQKBgGrBrO7ubNjF5MJtAx3MgvdvdMJpbn0hTlKwYBvqPImoSxZ2JfXwms02 +SbQa0jUHX0j86/4bUkg2r9PNqtSoWo9XQpfyZ/OMLlts014CoVmyL5whzNBqoFhi +dhyh1tzCoRoIrTucZZL9J7r/M565CbZEHcXcPMdtnkjR1CD4fzbV +-----END RSA PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/keys/rsa.pub.pem b/matrix-authentication-service/crates/jose/tests/keys/rsa.pub.pem new file mode 100644 index 00000000..c85b229d --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/keys/rsa.pub.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqNYHjNfiXl2SPu7NYSkd +5RiF45bo4c/WW4K1NwH+iBwOOb970RvwlcyhctsvrtrUAJ046Z+3LgW27MR73Ndc 
+BP6z756XWIQ6CV6XlowG9NlgnEmOolh3XujZuNig+/05anzhTJr6Xl7uxh8o61VQ +gBjOgDma7cnEJNz2s89nu9f+WOMNage63O02ecA17TsZjU2jYcOCnV5UhsIyVRUc +B3S2Jtk8FtRGBIydbkFHCX61atyh8GjzXYpneVPxTm1fRr+qUvGgNJqvK2HhuOzP +6wRpcyKS6cl47I9Mu4L36pavtB/WOTXNhtduSYbUvMnkifAGuYTJHpT2e5QzSm2p +7QIDAQAB +-----END PUBLIC KEY----- diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256__sign_jwt.snap new file mode 100644 index 00000000..ee4410b0 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJFUzI1NiJ9.eyJoZWxsbyI6IndvcmxkIn0._3wYtQklt0l_fhcwpQUSWbySVA3uJjVNgoudkvUInWjPpS7tO0sgmPf8Bwb3Rv9oTJncQfavs4rEw2kmgouPBw diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256k__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256k__sign_jwt.snap new file mode 100644 index 00000000..9881aecf --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es256k__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJFUzI1NksifQ.eyJoZWxsbyI6IndvcmxkIn0.-9Z19RYab_3Ym4Ork_lZUriouz5ktZFkT6B-DBGPYCJhVvSSNtG9Je9PEo0xpe9al0NhFcG5YJ4s4usDicsVjQ diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__es384__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es384__sign_jwt.snap new file mode 100644 index 00000000..854b5924 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__es384__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- 
+eyJhbGciOiJFUzM4NCJ9.eyJoZWxsbyI6IndvcmxkIn0.QIX0_gN6orAY32t6gKiDnstNdnBAmf1D5y-000ym-C8Y_MGt-HReODkUIMl7k6FNS1kw1FSbNXhXAPnAfcfgg2rR7oWDWfdxY5D0u1DcFGmhIrU5mxcUG50I_5YHIbe2 diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps256__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps256__sign_jwt.snap new file mode 100644 index 00000000..44f7cfdd --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps256__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJQUzI1NiJ9.eyJoZWxsbyI6IndvcmxkIn0.CupFwPDQkECCpxd9y0y4hdPccVa387MXe8jMnI5Q0nWwdXqJ9PCyEGOfdBDwFqAfWGYlTkcDjTua81K6tV2ctnFRd9mqs_i1PyhLp8PFO9PcdxtqQKRgA0M4CEA_Yd-7mDFeh4raHgWX6xoNGnEoqrPrp-Vl4jQzdXVpY-J_PKuam_0PlXv-pk3uBW5RD8HU1J8injsUp2FRIJfnOGok4ZnXZqy4_jKkBgu35ymgn011MvLKjHnwTSWteHHc1CVUmJ-txiCaQGWL-6sz0tKdpEpekDCXyygaabn4rDtxm4Be2NeS1Nm852pwzg78SLgxgGPs9uxOx-cH66nWX6Ct9w diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps384__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps384__sign_jwt.snap new file mode 100644 index 00000000..be21c49f --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps384__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJQUzM4NCJ9.eyJoZWxsbyI6IndvcmxkIn0.IlvyM131OVgUdNUlnAFDC4ZgIUtF_rzM_mOYasKi9WMB6d83AD-CRSnpkCXjSRS6WXx8fcLl5WA5COAMTG7PiDZlCxQ2zWsBn4SF2e8ARAiCsEGkkHhY6r68mXq86bdVD_46RKOnpBBK_DGu_ZHFY7Cjo6SGYol57HKIoGhTi79qQd0tYPdqNYO02KOTsR83-ph5vdEdM4jLg81X7--rH08Zhtnywu1JnmtxEotTvtbwXB1tDTTZvgywzgP63krP44D5hH-PlKLw4Bia_LQkSE4OE1HfDsK1IK4Y7SniJTrTQXp5FVASPrQnF2-lJUz_oDqzTKAv7FXCcCz1iPKbvg diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps512__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps512__sign_jwt.snap new file mode 100644 
index 00000000..62cef2df --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__ps512__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJQUzUxMiJ9.eyJoZWxsbyI6IndvcmxkIn0.Chyx9_a-dAyy2tB5hgj3SzLCoDSFx7GxO1PnFCrPN0z8pVRpOTrHaHDVlqPq0IjIGwPAcrTpNtwTIJdjNcpck9nyTShOUQya0tAGCrV1hbxR_QLGPayJydq8_treTKHeGxby4RaInM8k_hLz-6136FDiZXSxtZ6p4mCEcWeYiG5WGVqY15YptCuIipsY01Fyrew8djnIgW9bqS0aP9pakQWOIigYavFxhrLzyutgXiNxsNSH8OTCh9UQr62xEePJWsXkZIkSqtQlEnK68qhSgLffinyDtDMS7CAt82Lh0ac3vqRVyM0w4_l2C-auLE1aeAAroAhnc9YLVg0BufvydQ diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs256__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs256__sign_jwt.snap new file mode 100644 index 00000000..9c8824f9 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs256__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJSUzI1NiJ9.eyJoZWxsbyI6IndvcmxkIn0.ji96-idJ7VHafGOGt22nJPVSDC6S2XvZSUFG7TLrjv-_ylINko_9YsI9_-9UZcB5ZtMeCX6Z5eO_9MTaq3Fhcj7mdn_hozZaNseTVgnwkFfTBlF7HcWhBdWbihAoY1YDvhTu-l_L6iBt1KhQh3J6fsfeGB-l3JfygZLKLtM1gsEz2qaZpnM90wESpphvpaJ_rGlWcTu61DGBBB3kOGCgaG2CJypCKp67m2vxFfi7J_2yE-1H2Y9ACWye73TWNuZubXNdo6azqqiJRe9o6oFmuPwkjgld66MdshQWjo3sGPHPI1_V-nhR9AtoizzF-3_YoS9oVwAzL6GiVUzeKpvZfQ diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs384__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs384__sign_jwt.snap new file mode 100644 index 00000000..e7e6c67e --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs384__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- 
+eyJhbGciOiJSUzM4NCJ9.eyJoZWxsbyI6IndvcmxkIn0.UgY6PfaVQ3Rhz_RvS8YZmCjUIcchejdWcf5zvSRK0ANGB1r2yvcdvGkOeVsFdKW_z7oru_4jTOffLgm8NoYVvg_x44u_z63ENrQTGbO0QLOLZKI4fuEvKDrKpkf2BmSPa-2feKQECVXxCcIiR32Q_zTHJtTIaDV2-hk2W_CEJxCVqLZ4b6l5iI2qLKUS3vERDKdwA2igiA_NElv4KThCtNIoS8TBohwio-M-SV43i-aJHnyn2U6Uw3Gu1mCSIBeRUNoQPXFBFnWY1Pa5TrxPA2jekck9j_xCWOX_jWK1khBW1lMwzYC5Ry24S7QxOcg8l2x8I6J03gB4N651fhcKgQ diff --git a/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs512__sign_jwt.snap b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs512__sign_jwt.snap new file mode 100644 index 00000000..f16ee078 --- /dev/null +++ b/matrix-authentication-service/crates/jose/tests/snapshots/jws__rs512__sign_jwt.snap @@ -0,0 +1,5 @@ +--- +source: crates/jose/tests/jws.rs +expression: jwt.as_str() +--- +eyJhbGciOiJSUzUxMiJ9.eyJoZWxsbyI6IndvcmxkIn0.HMs8F0DuJbLh0mjhXh5-PE66m8hwjdRP0_ixm_LKmeieAmJrerObyKHtstOdaLO0l_r3XXg2bHjzwGNSn3XF5Gj0RgqRqW6T5X8CO_Kf__0B-lTUfiXpxyLMhb3Vkt9fRa1YZjVix8hGsEx8oerA_xqv1DzgdKNvO4kK_Vzykuz5bgLn2oQR1w1NARCqazmjKh4S9q9XS8BZ-Ke2xTLSOpLP4g67IGyo79Y_BZ0-mOgBWZmPGzJnBGOrv4Lc-Vn3kPNZqREM9DA9IILw1hbCRG6x31pM5u1PESIV1dSuoIaab5A9yfBx1Fr9PRxV-1qHRaRYi06E_q_jxwtPG2oM7w diff --git a/matrix-authentication-service/crates/keystore/Cargo.toml b/matrix-authentication-service/crates/keystore/Cargo.toml new file mode 100644 index 00000000..0b97e5d8 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/Cargo.toml @@ -0,0 +1,45 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-keystore" +description = "Secret keys store used by the Matrix Authentication Service" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +aead.workspace = true +base64ct.workspace = true +chacha20poly1305.workspace = true +const-oid.workspace = true +der.workspace = true +elliptic-curve.workspace = true +generic-array.workspace = true +k256.workspace = true +p256.workspace = true +p384.workspace = true +pem-rfc7468.workspace = true +pkcs1.workspace = true +pkcs8.workspace = true +rand.workspace = true +rsa.workspace = true +sec1.workspace = true +spki.workspace = true +thiserror.workspace = true + +mas-iana.workspace = true +mas-jose.workspace = true + +[dev-dependencies] +insta.workspace = true +rand_chacha.workspace = true diff --git a/matrix-authentication-service/crates/keystore/src/encrypter.rs b/matrix-authentication-service/crates/keystore/src/encrypter.rs new file mode 100644 index 00000000..a2cb738e --- /dev/null +++ b/matrix-authentication-service/crates/keystore/src/encrypter.rs @@ -0,0 +1,94 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::Arc; + +use aead::Aead; +use base64ct::{Base64, Encoding}; +use chacha20poly1305::{ChaCha20Poly1305, KeyInit}; +use generic_array::GenericArray; +use thiserror::Error; + +/// Helps encrypting and decrypting data +#[derive(Clone)] +pub struct Encrypter { + aead: Arc, +} + +#[derive(Debug, Error)] +#[error("Decryption error")] +pub enum DecryptError { + Aead(#[from] aead::Error), + Base64(#[from] base64ct::Error), + Shape, +} + +impl Encrypter { + /// Creates an [`Encrypter`] out of an encryption key + #[must_use] + pub fn new(key: &[u8; 32]) -> Self { + let key = GenericArray::from_slice(key); + let aead = ChaCha20Poly1305::new(key); + let aead = Arc::new(aead); + Self { aead } + } + + /// Encrypt a payload + /// + /// # Errors + /// + /// Will return `Err` when the payload failed to encrypt + pub fn encrypt(&self, nonce: &[u8; 12], decrypted: &[u8]) -> Result, aead::Error> { + let nonce = GenericArray::from_slice(&nonce[..]); + let encrypted = self.aead.encrypt(nonce, decrypted)?; + Ok(encrypted) + } + + /// Decrypts a payload + /// + /// # Errors + /// + /// Will return `Err` when the payload failed to decrypt + pub fn decrypt(&self, nonce: &[u8; 12], encrypted: &[u8]) -> Result, aead::Error> { + let nonce = GenericArray::from_slice(&nonce[..]); + let encrypted = self.aead.decrypt(nonce, encrypted)?; + Ok(encrypted) + } + + /// Encrypt a payload to a self-contained base64-encoded string + /// + /// # Errors + /// + /// Will return `Err` when the payload failed to encrypt + pub fn encrypt_to_string(&self, decrypted: &[u8]) -> Result { + let nonce = rand::random(); + let encrypted = self.encrypt(&nonce, decrypted)?; + let encrypted = [&nonce[..], &encrypted].concat(); + let encrypted = Base64::encode_string(&encrypted); + Ok(encrypted) + } + + /// Decrypt a payload from a self-contained base64-encoded string + /// + /// # Errors + /// + /// Will return `Err` when the payload failed to decrypt + pub fn decrypt_string(&self, encrypted: &str) -> 
Result, DecryptError> { + let encrypted = Base64::decode_vec(encrypted)?; + + let nonce: &[u8; 12] = encrypted + .get(0..12) + .ok_or(DecryptError::Shape)? + .try_into() + .map_err(|_| DecryptError::Shape)?; + + let payload = encrypted.get(12..).ok_or(DecryptError::Shape)?; + + let decrypted_client_secret = self.decrypt(nonce, payload)?; + + Ok(decrypted_client_secret) + } +} diff --git a/matrix-authentication-service/crates/keystore/src/lib.rs b/matrix-authentication-service/crates/keystore/src/lib.rs new file mode 100644 index 00000000..fa9d305b --- /dev/null +++ b/matrix-authentication-service/crates/keystore/src/lib.rs @@ -0,0 +1,642 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A crate to store keys which can then be used to sign and verify JWTs. + +use std::{ops::Deref, sync::Arc}; + +use der::{Decode, Encode, EncodePem, zeroize::Zeroizing}; +use elliptic_curve::{pkcs8::EncodePrivateKey, sec1::ToEncodedPoint}; +use mas_iana::jose::{JsonWebKeyType, JsonWebSignatureAlg}; +pub use mas_jose::jwk::{JsonWebKey, JsonWebKeySet}; +use mas_jose::{ + jwa::{AsymmetricSigningKey, AsymmetricVerifyingKey}, + jwk::{JsonWebKeyPublicParameters, ParametersInfo, PublicJsonWebKeySet, Thumbprint}, +}; +use pem_rfc7468::PemLabel; +use pkcs1::EncodeRsaPrivateKey; +use pkcs8::{AssociatedOid, PrivateKeyInfo}; +use rand::{CryptoRng, RngCore}; +use rsa::BigUint; +use thiserror::Error; + +mod encrypter; + +pub use aead; + +pub use self::encrypter::{DecryptError, Encrypter}; + +/// Error type used when a key could not be loaded +#[derive(Debug, Error)] +pub enum LoadError { + #[error("Failed to read PEM document")] + Pem { + #[from] + inner: pem_rfc7468::Error, + }, + + #[error("Invalid RSA private key")] + Rsa { + #[from] + inner: rsa::errors::Error, + }, + + #[error("Failed to 
decode PKCS1-encoded RSA key")] + Pkcs1 { + #[from] + inner: pkcs1::Error, + }, + + #[error("Failed to decode PKCS8-encoded key")] + Pkcs8 { + #[from] + inner: pkcs8::Error, + }, + + #[error(transparent)] + Der { + #[from] + inner: der::Error, + }, + + #[error(transparent)] + Spki { + #[from] + inner: spki::Error, + }, + + #[error("Unknown Elliptic Curve OID {oid}")] + UnknownEllipticCurveOid { oid: const_oid::ObjectIdentifier }, + + #[error("Unknown algorithm OID {oid}")] + UnknownAlgorithmOid { oid: const_oid::ObjectIdentifier }, + + #[error("Unsupported PEM label {label:?}")] + UnsupportedPemLabel { label: String }, + + #[error("Missing parameters in SEC1 key")] + MissingSec1Parameters, + + #[error("Missing curve name in SEC1 parameters")] + MissingSec1CurveName, + + #[error("Key is encrypted and no password was provided")] + Encrypted, + + #[error("Key is not encrypted but a password was provided")] + Unencrypted, + + #[error("Unsupported format")] + UnsupportedFormat, + + #[error("Could not decode encrypted payload")] + InEncrypted { + #[source] + inner: Box, + }, +} + +impl LoadError { + /// Returns `true` if the load error is [`Encrypted`]. + /// + /// [`Encrypted`]: LoadError::Encrypted + #[must_use] + pub fn is_encrypted(&self) -> bool { + matches!(self, Self::Encrypted) + } + + /// Returns `true` if the load error is [`Unencrypted`]. 
+ /// + /// [`Unencrypted`]: LoadError::Unencrypted + #[must_use] + pub fn is_unencrypted(&self) -> bool { + matches!(self, Self::Unencrypted) + } +} + +/// A single private key +#[non_exhaustive] +#[derive(Debug)] +pub enum PrivateKey { + Rsa(Box), + EcP256(Box>), + EcP384(Box>), + EcK256(Box>), +} + +/// Error returned when the key can't be used for the requested algorithm +#[derive(Debug, Error)] +#[error("Wrong algorithm for key")] +pub struct WrongAlgorithmError; + +impl PrivateKey { + fn from_pkcs1_private_key(pkcs1_key: &pkcs1::RsaPrivateKey) -> Result { + // Taken from `TryFrom> for RsaPrivateKey` + + // Multi-prime RSA keys not currently supported + if pkcs1_key.version() != pkcs1::Version::TwoPrime { + return Err(pkcs1::Error::Version.into()); + } + + let n = BigUint::from_bytes_be(pkcs1_key.modulus.as_bytes()); + let e = BigUint::from_bytes_be(pkcs1_key.public_exponent.as_bytes()); + let d = BigUint::from_bytes_be(pkcs1_key.private_exponent.as_bytes()); + let first_prime = BigUint::from_bytes_be(pkcs1_key.prime1.as_bytes()); + let second_prime = BigUint::from_bytes_be(pkcs1_key.prime2.as_bytes()); + let primes = vec![first_prime, second_prime]; + let key = rsa::RsaPrivateKey::from_components(n, e, d, primes)?; + Ok(Self::Rsa(Box::new(key))) + } + + fn from_private_key_info(info: PrivateKeyInfo) -> Result { + match info.algorithm.oid { + pkcs1::ALGORITHM_OID => Ok(Self::Rsa(Box::new(info.try_into()?))), + elliptic_curve::ALGORITHM_OID => match info.algorithm.parameters_oid()? { + p256::NistP256::OID => Ok(Self::EcP256(Box::new(info.try_into()?))), + p384::NistP384::OID => Ok(Self::EcP384(Box::new(info.try_into()?))), + k256::Secp256k1::OID => Ok(Self::EcK256(Box::new(info.try_into()?))), + oid => Err(LoadError::UnknownEllipticCurveOid { oid }), + }, + oid => Err(LoadError::UnknownAlgorithmOid { oid }), + } + } + + fn from_ec_private_key(key: sec1::EcPrivateKey) -> Result { + let curve = key + .parameters + .ok_or(LoadError::MissingSec1Parameters)? 
+ .named_curve() + .ok_or(LoadError::MissingSec1CurveName)?; + + match curve { + p256::NistP256::OID => Ok(Self::EcP256(Box::new(key.try_into()?))), + p384::NistP384::OID => Ok(Self::EcP384(Box::new(key.try_into()?))), + k256::Secp256k1::OID => Ok(Self::EcK256(Box::new(key.try_into()?))), + oid => Err(LoadError::UnknownEllipticCurveOid { oid }), + } + } + + /// Serialize the key as a DER document + /// + /// It will use the most common format depending on the key type: PKCS1 for + /// RSA keys and SEC1 for elliptic curve keys + /// + /// # Errors + /// + /// Returns an error if the encoding failed + pub fn to_der(&self) -> Result>, pkcs1::Error> { + let der = match self { + PrivateKey::Rsa(key) => key.to_pkcs1_der()?.to_bytes(), + PrivateKey::EcP256(key) => to_sec1_der(key)?, + PrivateKey::EcP384(key) => to_sec1_der(key)?, + PrivateKey::EcK256(key) => to_sec1_der(key)?, + }; + + Ok(der) + } + + /// Serialize the key as a PKCS8 DER document + /// + /// # Errors + /// + /// Returns an error if the encoding failed + pub fn to_pkcs8_der(&self) -> Result>, pkcs8::Error> { + let der = match self { + PrivateKey::Rsa(key) => key.to_pkcs8_der()?, + PrivateKey::EcP256(key) => key.to_pkcs8_der()?, + PrivateKey::EcP384(key) => key.to_pkcs8_der()?, + PrivateKey::EcK256(key) => key.to_pkcs8_der()?, + }; + + Ok(der.to_bytes()) + } + + /// Serialize the key as a PEM document + /// + /// It will use the most common format depending on the key type: PKCS1 for + /// RSA keys and SEC1 for elliptic curve keys + /// + /// # Errors + /// + /// Returns an error if the encoding failed + pub fn to_pem( + &self, + line_ending: pem_rfc7468::LineEnding, + ) -> Result, pkcs1::Error> { + let pem = match self { + PrivateKey::Rsa(key) => key.to_pkcs1_pem(line_ending)?, + PrivateKey::EcP256(key) => to_sec1_pem(key, line_ending)?, + PrivateKey::EcP384(key) => to_sec1_pem(key, line_ending)?, + PrivateKey::EcK256(key) => to_sec1_pem(key, line_ending)?, + }; + + Ok(pem) + } + + /// Load an unencrypted 
PEM or DER encoded key + /// + /// # Errors + /// + /// Returns the same kind of errors as [`Self::load_pem`] and + /// [`Self::load_der`]. + pub fn load(bytes: &[u8]) -> Result { + if let Ok(pem) = std::str::from_utf8(bytes) { + match Self::load_pem(pem) { + Ok(s) => return Ok(s), + // If there was an error loading the document as PEM, ignore it and continue by + // trying to load it as DER + Err(LoadError::Pem { .. }) => {} + Err(e) => return Err(e), + } + } + + Self::load_der(bytes) + } + + /// Load an encrypted PEM or DER encoded key, and decrypt it with the given + /// password + /// + /// # Errors + /// + /// Returns the same kind of errors as [`Self::load_encrypted_pem`] and + /// [`Self::load_encrypted_der`]. + pub fn load_encrypted(bytes: &[u8], password: impl AsRef<[u8]>) -> Result { + if let Ok(pem) = std::str::from_utf8(bytes) { + match Self::load_encrypted_pem(pem, password.as_ref()) { + Ok(s) => return Ok(s), + // If there was an error loading the document as PEM, ignore it and continue by + // trying to load it as DER + Err(LoadError::Pem { .. 
}) => {} + Err(e) => return Err(e), + } + } + + Self::load_encrypted_der(bytes, password) + } + + /// Load an encrypted key from DER-encoded bytes, and decrypt it with the + /// given password + /// + /// # Errors + /// + /// Returns an error if: + /// - the key is in an non-encrypted format + /// - the key could not be decrypted + /// - the PKCS8 key could not be loaded + pub fn load_encrypted_der(der: &[u8], password: impl AsRef<[u8]>) -> Result { + if let Ok(info) = pkcs8::EncryptedPrivateKeyInfo::from_der(der) { + let decrypted = info.decrypt(password)?; + return Self::load_der(decrypted.as_bytes()).map_err(|inner| LoadError::InEncrypted { + inner: Box::new(inner), + }); + } + + if pkcs8::PrivateKeyInfo::from_der(der).is_ok() + || sec1::EcPrivateKey::from_der(der).is_ok() + || pkcs1::RsaPrivateKey::from_der(der).is_ok() + { + return Err(LoadError::Unencrypted); + } + + Err(LoadError::UnsupportedFormat) + } + + /// Load an unencrypted key from DER-encoded bytes + /// + /// It tries to decode the bytes from the various known DER formats (PKCS8, + /// SEC1 and PKCS1, in that order), and return the first one that works. 
+ /// + /// # Errors + /// + /// Returns an error if: + /// - the PKCS8 key is encrypted + /// - none of the formats could be decoded + /// - the PKCS8/SEC1/PKCS1 key could not be loaded + pub fn load_der(der: &[u8]) -> Result { + // Let's try evey known DER format one after the other + if pkcs8::EncryptedPrivateKeyInfo::from_der(der).is_ok() { + return Err(LoadError::Encrypted); + } + + if let Ok(info) = pkcs8::PrivateKeyInfo::from_der(der) { + return Self::from_private_key_info(info); + } + + if let Ok(info) = sec1::EcPrivateKey::from_der(der) { + return Self::from_ec_private_key(info); + } + + if let Ok(pkcs1_key) = pkcs1::RsaPrivateKey::from_der(der) { + return Self::from_pkcs1_private_key(&pkcs1_key); + } + + Err(LoadError::UnsupportedFormat) + } + + /// Load an encrypted key from a PEM-encode string, and decrypt it with the + /// given password + /// + /// # Errors + /// + /// Returns an error if: + /// - the file is not a signel PEM document + /// - the PEM label is not a supported format + /// - the underlying key is not encrypted (use [`Self::load`] instead) + /// - the decryption failed + /// - the pkcs8 key could not be loaded + pub fn load_encrypted_pem(pem: &str, password: impl AsRef<[u8]>) -> Result { + let (label, doc) = pem_rfc7468::decode_vec(pem.as_bytes())?; + + match label { + pkcs8::EncryptedPrivateKeyInfo::PEM_LABEL => { + let info = pkcs8::EncryptedPrivateKeyInfo::from_der(&doc)?; + let decrypted = info.decrypt(password)?; + Self::load_der(decrypted.as_bytes()).map_err(|inner| LoadError::InEncrypted { + inner: Box::new(inner), + }) + } + + pkcs1::RsaPrivateKey::PEM_LABEL + | pkcs8::PrivateKeyInfo::PEM_LABEL + | sec1::EcPrivateKey::PEM_LABEL => Err(LoadError::Unencrypted), + + label => Err(LoadError::UnsupportedPemLabel { + label: label.to_owned(), + }), + } + } + + /// Load an unencrypted key from a PEM-encode string + /// + /// # Errors + /// + /// Returns an error if: + /// - the file is not a signel PEM document + /// - the PEM label is 
not a supported format + /// - the underlying key is encrypted (use [`Self::load_encrypted`] + /// instead) + /// - the PKCS8/PKCS1/SEC1 key could not be loaded + pub fn load_pem(pem: &str) -> Result { + let (label, doc) = pem_rfc7468::decode_vec(pem.as_bytes())?; + + match label { + pkcs1::RsaPrivateKey::PEM_LABEL => { + let pkcs1_key = pkcs1::RsaPrivateKey::from_der(&doc)?; + Self::from_pkcs1_private_key(&pkcs1_key) + } + + pkcs8::PrivateKeyInfo::PEM_LABEL => { + let info = pkcs8::PrivateKeyInfo::from_der(&doc)?; + Self::from_private_key_info(info) + } + + sec1::EcPrivateKey::PEM_LABEL => { + let key = sec1::EcPrivateKey::from_der(&doc)?; + Self::from_ec_private_key(key) + } + + pkcs8::EncryptedPrivateKeyInfo::PEM_LABEL => Err(LoadError::Encrypted), + + label => Err(LoadError::UnsupportedPemLabel { + label: label.to_owned(), + }), + } + } + + /// Get an [`AsymmetricVerifyingKey`] out of this key, for the specified + /// [`JsonWebSignatureAlg`] + /// + /// # Errors + /// + /// Returns an error if the key is not suited for the selected algorithm + pub fn verifying_key_for_alg( + &self, + alg: &JsonWebSignatureAlg, + ) -> Result { + let key = match (self, alg) { + (Self::Rsa(key), _) => { + let key: rsa::RsaPublicKey = key.to_public_key(); + match alg { + JsonWebSignatureAlg::Rs256 => AsymmetricVerifyingKey::rs256(key), + JsonWebSignatureAlg::Rs384 => AsymmetricVerifyingKey::rs384(key), + JsonWebSignatureAlg::Rs512 => AsymmetricVerifyingKey::rs512(key), + JsonWebSignatureAlg::Ps256 => AsymmetricVerifyingKey::ps256(key), + JsonWebSignatureAlg::Ps384 => AsymmetricVerifyingKey::ps384(key), + JsonWebSignatureAlg::Ps512 => AsymmetricVerifyingKey::ps512(key), + _ => return Err(WrongAlgorithmError), + } + } + + (Self::EcP256(key), JsonWebSignatureAlg::Es256) => { + AsymmetricVerifyingKey::es256(key.public_key()) + } + + (Self::EcP384(key), JsonWebSignatureAlg::Es384) => { + AsymmetricVerifyingKey::es384(key.public_key()) + } + + (Self::EcK256(key), 
JsonWebSignatureAlg::Es256K) => { + AsymmetricVerifyingKey::es256k(key.public_key()) + } + + _ => return Err(WrongAlgorithmError), + }; + + Ok(key) + } + + /// Get a [`AsymmetricSigningKey`] out of this key, for the specified + /// [`JsonWebSignatureAlg`] + /// + /// # Errors + /// + /// Returns an error if the key is not suited for the selected algorithm + pub fn signing_key_for_alg( + &self, + alg: &JsonWebSignatureAlg, + ) -> Result { + let key = match (self, alg) { + (Self::Rsa(key), _) => { + let key: rsa::RsaPrivateKey = *key.clone(); + match alg { + JsonWebSignatureAlg::Rs256 => AsymmetricSigningKey::rs256(key), + JsonWebSignatureAlg::Rs384 => AsymmetricSigningKey::rs384(key), + JsonWebSignatureAlg::Rs512 => AsymmetricSigningKey::rs512(key), + JsonWebSignatureAlg::Ps256 => AsymmetricSigningKey::ps256(key), + JsonWebSignatureAlg::Ps384 => AsymmetricSigningKey::ps384(key), + JsonWebSignatureAlg::Ps512 => AsymmetricSigningKey::ps512(key), + _ => return Err(WrongAlgorithmError), + } + } + + (Self::EcP256(key), JsonWebSignatureAlg::Es256) => { + AsymmetricSigningKey::es256(*key.clone()) + } + + (Self::EcP384(key), JsonWebSignatureAlg::Es384) => { + AsymmetricSigningKey::es384(*key.clone()) + } + + (Self::EcK256(key), JsonWebSignatureAlg::Es256K) => { + AsymmetricSigningKey::es256k(*key.clone()) + } + + _ => return Err(WrongAlgorithmError), + }; + + Ok(key) + } + + /// Generate a RSA key with 2048 bit size + /// + /// # Errors + /// + /// Returns any error from the underlying key generator + pub fn generate_rsa(mut rng: R) -> Result { + let key = rsa::RsaPrivateKey::new(&mut rng, 2048)?; + Ok(Self::Rsa(Box::new(key))) + } + + /// Generate an Elliptic Curve key for the P-256 curve + pub fn generate_ec_p256(mut rng: R) -> Self { + let key = elliptic_curve::SecretKey::random(&mut rng); + Self::EcP256(Box::new(key)) + } + + /// Generate an Elliptic Curve key for the P-384 curve + pub fn generate_ec_p384(mut rng: R) -> Self { + let key = 
elliptic_curve::SecretKey::random(&mut rng); + Self::EcP384(Box::new(key)) + } + + /// Generate an Elliptic Curve key for the secp256k1 curve + pub fn generate_ec_k256(mut rng: R) -> Self { + let key = elliptic_curve::SecretKey::random(&mut rng); + Self::EcK256(Box::new(key)) + } +} + +// The default implementation of SecretKey::to_sec1_pem/der do not include the +// named curve OID. This is a basic reimplementation of those two functions with +// the OID included, so that it matches the implementation in OpenSSL. +fn to_sec1_der(key: &elliptic_curve::SecretKey) -> Result>, der::Error> +where + C: elliptic_curve::Curve + elliptic_curve::CurveArithmetic + AssociatedOid, + elliptic_curve::PublicKey: elliptic_curve::sec1::ToEncodedPoint, + C::FieldBytesSize: elliptic_curve::sec1::ModulusSize, +{ + let private_key_bytes = Zeroizing::new(key.to_bytes()); + let public_key_bytes = key.public_key().to_encoded_point(false); + Ok(Zeroizing::new( + sec1::EcPrivateKey { + private_key: &private_key_bytes, + parameters: Some(sec1::EcParameters::NamedCurve(C::OID)), + public_key: Some(public_key_bytes.as_bytes()), + } + .to_der()?, + )) +} + +fn to_sec1_pem( + key: &elliptic_curve::SecretKey, + line_ending: pem_rfc7468::LineEnding, +) -> Result, der::Error> +where + C: elliptic_curve::Curve + elliptic_curve::CurveArithmetic + AssociatedOid, + elliptic_curve::PublicKey: elliptic_curve::sec1::ToEncodedPoint, + C::FieldBytesSize: elliptic_curve::sec1::ModulusSize, +{ + let private_key_bytes = Zeroizing::new(key.to_bytes()); + let public_key_bytes = key.public_key().to_encoded_point(false); + Ok(Zeroizing::new( + sec1::EcPrivateKey { + private_key: &private_key_bytes, + parameters: Some(sec1::EcParameters::NamedCurve(C::OID)), + public_key: Some(public_key_bytes.as_bytes()), + } + .to_pem(line_ending)?, + )) +} + +impl From<&PrivateKey> for JsonWebKeyPublicParameters { + fn from(val: &PrivateKey) -> Self { + match val { + PrivateKey::Rsa(key) => key.to_public_key().into(), + 
PrivateKey::EcP256(key) => key.public_key().into(), + PrivateKey::EcP384(key) => key.public_key().into(), + PrivateKey::EcK256(key) => key.public_key().into(), + } + } +} + +impl ParametersInfo for PrivateKey { + fn kty(&self) -> JsonWebKeyType { + match self { + PrivateKey::Rsa(_) => JsonWebKeyType::Rsa, + PrivateKey::EcP256(_) | PrivateKey::EcP384(_) | PrivateKey::EcK256(_) => { + JsonWebKeyType::Ec + } + } + } + + fn possible_algs(&self) -> &'static [JsonWebSignatureAlg] { + match self { + PrivateKey::Rsa(_) => &[ + JsonWebSignatureAlg::Rs256, + JsonWebSignatureAlg::Rs384, + JsonWebSignatureAlg::Rs512, + JsonWebSignatureAlg::Ps256, + JsonWebSignatureAlg::Ps384, + JsonWebSignatureAlg::Ps512, + ], + PrivateKey::EcP256(_) => &[JsonWebSignatureAlg::Es256], + PrivateKey::EcP384(_) => &[JsonWebSignatureAlg::Es384], + PrivateKey::EcK256(_) => &[JsonWebSignatureAlg::Es256K], + } + } +} + +impl Thumbprint for PrivateKey { + fn thumbprint_prehashed(&self) -> String { + JsonWebKeyPublicParameters::from(self).thumbprint_prehashed() + } +} + +/// A structure to store a list of [`PrivateKey`]. 
The keys are held in an +/// [`Arc`] to ensure they are only loaded once in memory and allow cheap +/// cloning +#[derive(Clone, Default)] +pub struct Keystore { + keys: Arc>, +} + +impl Keystore { + /// Create a keystore out of a JSON Web Key Set + #[must_use] + pub fn new(keys: JsonWebKeySet) -> Self { + let keys = Arc::new(keys); + Self { keys } + } + + /// Get the public JSON Web Key Set for the keys stored in this [`Keystore`] + #[must_use] + pub fn public_jwks(&self) -> PublicJsonWebKeySet { + self.keys + .iter() + .map(|key| { + key.cloned_map(|params: &PrivateKey| JsonWebKeyPublicParameters::from(params)) + }) + .collect() + } +} + +impl Deref for Keystore { + type Target = JsonWebKeySet; + + fn deref(&self) -> &Self::Target { + &self.keys + } +} diff --git a/matrix-authentication-service/crates/keystore/tests/generate.sh b/matrix-authentication-service/crates/keystore/tests/generate.sh new file mode 100644 index 00000000..b6cbe108 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/generate.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +set -eux + +KEYS="$(dirname "$0")/keys" +mkdir -p "${KEYS}" + +export PASSWORD="hunter2" + +convert() { + FILE=$1 + NAME=$2 + openssl asn1parse -noout -in "${KEYS}/${FILE}.pem" -out "${KEYS}/${FILE}.der" + openssl pkcs8 -topk8 -nocrypt -in "${KEYS}/${FILE}.pem" -out "${KEYS}/${NAME}.pkcs8.pem" + openssl asn1parse -noout -in "${KEYS}/${NAME}.pkcs8.pem" -out "${KEYS}/${NAME}.pkcs8.der" + openssl pkcs8 -topk8 -passout env:PASSWORD -in "${KEYS}/${FILE}.pem" -out "${KEYS}/${NAME}.pkcs8.encrypted.pem" + openssl asn1parse -noout -in "${KEYS}/${NAME}.pkcs8.encrypted.pem" -out "${KEYS}/${NAME}.pkcs8.encrypted.der" +} + +openssl genrsa -out "${KEYS}/rsa.pkcs1.pem" 2048 +convert "rsa.pkcs1" "rsa" + +openssl ecparam -genkey -name prime256v1 -noout -out "${KEYS}/ec-p256.sec1.pem" +convert "ec-p256.sec1" "ec-p256" + +openssl ecparam -genkey -name secp384r1 -noout -out "${KEYS}/ec-p384.sec1.pem" +convert "ec-p384.sec1" "ec-p384" + +openssl ecparam -genkey -name secp256k1 -noout -out "${KEYS}/ec-k256.sec1.pem" +convert "ec-k256.sec1" "ec-k256" diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.der b/matrix-authentication-service/crates/keystore/tests/keys/ec-k256.pkcs8.der new file mode 100644 index 0000000000000000000000000000000000000000..793d3987db9c03c338e3cc1c5569d9b1f0c27b2d GIT binary patch literal 135 zcmV;20C@i}frJ7905A{+2P%e0&OHJF1_djD1ON&IZ7^#B0RaRcPcnH(b=OmAs_mZV z1@Vk8#sisK8SNv`rB|tw_1gNPp+o~h00c;<)U>9Hs3+ko>UYo1^jn{DIg8!;NXA+3 ptt(!VnQYl5v_gT=+K_QMFfOen8Si=97@(xNv_5;QZvJtBxB7L#XbdL<2$q1W-ZWC#pr2DO^#RoO&0ZU6uP literal 0 HcmV?d00001 diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.der b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.der new file mode 100644 index 0000000000000000000000000000000000000000..e0d77bb0c994c4b44b1d3bd212b11a961b328d6c GIT binary patch literal 239 zcmXqLd}9#K#;Mij(e|B}k(JlL%Rm#sZxgMo&u;7w>vsbHcOqeZ literal 0 HcmV?d00001 diff --git 
a/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.pem b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.pem new file mode 100644 index 00000000..e8daf6db --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.encrypted.pem @@ -0,0 +1,7 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIHsMFcGCSqGSIb3DQEFDTBKMCkGCSqGSIb3DQEFDDAcBAhIOTdQ9pS7EgICCAAw +DAYIKoZIhvcNAgkFADAdBglghkgBZQMEASoEEEVvTsSwG1HEr6urEKUSC8kEgZCQ +fLQHNDHSjGin9RvcMYi5htsKZbRJK1JL19o7cf8W4AH0kKNAlDtJBrc7j/9tlCkP +b/7O7KFCNkeCrfF113mzgoRuD4xLzoe3n+ybpeBgf8WJuJowiZwhKGXGlUP/m+XX +aWiCKUaaA4huhJbQzJDBdVUnKEZZ+lysEMjYjNgplGc2uvoNSywWKHubgY9Wj0Y= +-----END ENCRYPTED PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.pem b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.pem new file mode 100644 index 00000000..07a7aede --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.pkcs8.pem @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg5Ru1AmWbX0F4p3X0 +8YIWMnVm+6KJqQiIjm0Pw2BDqO6hRANCAARQQd/kCEAv7PYjKvA+xhQAvnQXNbXZ +fXfUHEiuBjpV2b70TZCr08POfUZf/BjTHG+NuluyLFle6dJWIga1muhV +-----END PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.sec1.der b/matrix-authentication-service/crates/keystore/tests/keys/ec-p256.sec1.der new file mode 100644 index 0000000000000000000000000000000000000000..6a9f8b3998829d0bd77b522e755d63f3e717f90d GIT binary patch literal 121 zcmV-<0EYiCcLD(c1R&)bwE|_EUqN`Mb@cIq7BY2a`=W`d2#Aht55r(XsP3Q&1_&yK zNX|V20SBQ(13~}X zJkO{oXj<`~Yk~AtIFnW|bZ##EIUeE(h6)>d+4aRnvUC{x{&}Hf17ZLKteG9X*4YJX z=9h5QDh1~`=28V>G;e6D&Of`$?sTKn@vJ@zo&AA(!+r8^o}a)@$MizR-|gch&AR@) neI=ZVy0qto1!P(8*&H9a^g5(_+`!B=*3$H5I)Os1lKdeY=L1%H literal 0 HcmV?d00001 diff --git 
a/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.der b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.der new file mode 100644 index 0000000000000000000000000000000000000000..1d8cab4b1368a9924eed81688c57fdcdf3d4f55f GIT binary patch literal 288 zcmXqLVw5omXXDgr^Jx3d%gD-W;ANnR;PM#Auy74nJ+epWR>XB=ks`HMs+Zz%Vu583qT!uR!i z?)J)U|HG!!o)z7*zJ9HSN3F%pqM2If7XRt%X0CEEVc+~^onoBPB=d`I6Yg7WeqXof z^@fL6z0Or{KW}NZ>p}kiY44mlc@6&N>vXX5$)>0MSQZ!l%F2D_I-TChix>9XJ=y7- h-0MEGNkG|RvDMj2!(BX*YojtC;(}GcJBZH literal 0 HcmV?d00001 diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.pem b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.pem new file mode 100644 index 00000000..524d25b2 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.encrypted.pem @@ -0,0 +1,8 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIBHDBXBgkqhkiG9w0BBQ0wSjApBgkqhkiG9w0BBQwwHAQIYMIe05yFZUgCAggA +MAwGCCqGSIb3DQIJBQAwHQYJYIZIAWUDBAEqBBAlgotQyGiZyH4G0SlIKij5BIHA +0vyLKiFzcUxy5Ch1FGWx4WpZlzhBKwk4ZPxKBH18/DXVbC9yfZJR5dCTgE46fFLM +QJmOTxRbY7B3SH9UsvLQ96+83Y0et/wGLIdqW4yvf60oSH042XKZKs6j/I6LA3pE +NAez9K4hXjKSN9FGkN86s+9+ouuw4dVKznu3zzk6uuBv/5buQwkNMP1vLIgHDh1n +ZvimXlfqOkeZriyNk6OhjN3JiU1jjUeZghAjOKM6o6U5CYAi9KgCDBJWtu6M8edV +-----END ENCRYPTED PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.pem b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.pem new file mode 100644 index 00000000..ed487587 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.pkcs8.pem @@ -0,0 +1,6 @@ +-----BEGIN PRIVATE KEY----- +MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDB7jIEkTYf/O4QMPM+o +KGha8Z9rgfRVOJNWMHRuLvw5HuIJhgobe9n1xUaydBj7/nmhZANiAASsmR291tkF +a+aXcNUqBec55lIFYjRvaKvOP7vL7nSj1PGsPgud/YF7w33yb56fwE7H9ELG3+3j 
+JM26/rx9JZyKurTnhQVkWe/ZHB+59Dqke9zAzDXW0vRmOoFCrZL8IRw= +-----END PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.sec1.der b/matrix-authentication-service/crates/keystore/tests/keys/ec-p384.sec1.der new file mode 100644 index 0000000000000000000000000000000000000000..4ab9beeb545bf6d73e5aad95fbe3eab2307ddbb4 GIT binary patch literal 167 zcmV;Y09gMpfusTf0R%96jDaLghyOc-3_Q=MC}>*opKF2iRXCGYFm!G%{5c-t35E(A zd)f8HMzVAm`~G>L2L=Tzfdl{|p=1MM00gX=9lh4s1#9M)aMdaW=Q-w51!6RBXsgaY zyUXr$qtx-NJ`0`wfqTP!@^7A>z)r{XLdM_i<0Q?x{=9u9oQk@%=Y<7iS?}2#AG!28 VqCoiA#UA2l93wrg#FtK8NMXq~p ziPDsg(x5AchTJ=k>j0E+C8;xl0bKe&P5oo=2E6XEY%bsX)!(&gTWmKYC|4V$2&ItU zYPLs2AGFJBBLEcZAU5!2z+Zl%KXByoB;NOtP0&{(fu$ZeVa}nMDkN#3|3{xE2Ojsb zPvE!S`=LV0Ict0zP}{E!Q=-Kxk>X3zw}lm0(!VNU5Smq#*ZI8)QZTO15bxTw#zC4% zts=N?w4V99wU@K?F=WRA0|5X50)hbma5-X1X+yWkN^Jwc(83+;fCSQ3o(;Dm>=^*% z)i)4zZZwVp|1vfNxXr9TA09!SZ!(KnSY^MK(;cphmHUI;W72st5-wo#$q95&22R5{ zqD=u)2?wx1V@*TI04Ar5X!2{1C43X~IAw|~l{Nv!<8`fCc|*md@YloqI&LR~wiGa~ zink1KjGj)S^}8{EgO_;SIEp`425Sc>@Q>OM6rGf%$Cuu1hsP_w*7uf)r){ch|K1%h z94Q7uCr_P1=5Q_XV@E3HyHs;!x758DYb8V-Q;^43&>}(h zRD}&}1io$ca{7*z2^l)GRl|nC0)c@5-%E0lRL$hM++&B3BMdf~Z!gw8L>sB#RVYo| zYk^MX*Jwk5@H?EzKJq5rS%zkbz{xmj37OaT|7CD(e6&OvT-CnpDn*}*9lD6h7_`9x zK+&hOK8s$tG$$5+h`r&)wv-JGfs6e}Bzqfz#kV#b=0@jIX}WNV(Kl^X^S|`v0)c@5 z)jN{eU>D5WokVT%cm587*$WTQFMTWWj}42H3W71cz~wq2$B^9WS zcVI@eXNp0>YVgMd?FInv8#aUMqo znK;@bzM3=Ybn1o-4T0)c>S`30Kj4r6HAPd1a;e+gjq?zC_Q94*Bw zi>DPh={<*ASfOs*cQ2Q@<)0XXANyW9(VuiN_)qe88EKBP%|U?BMFBhH@z+YXz=z-{ z+|>3QAZ47C;3X}`uDqDxD$9t6p-}KFo}8&Z%rJUDYz=^Jw@l1{Y( zfq+^O8O$4;vH2;Am+j!gJsIQInoLt-$;V*HRpmSp7B!My!;$W$`e*IWlSU04BXZJU=kZ!OB#8bHh<>DvF Ftj~+FNCN-> literal 0 HcmV?d00001 diff --git a/matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs1.pem b/matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs1.pem new file mode 100644 index 00000000..74ffc74c --- /dev/null +++ 
b/matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs1.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAuf28zPUp574jDRdX6uN0d7niZCIUpACFo+Po/13FuIGsrpze +yMX6CYWVPalgXW9FCrhxL+4toJRy5npjkgsLFsknL5/zXbWKFgt69cMwsWJ9Ra57 +bonSlI7SoCuHhtw7j+sAlHAlqTOCAVz6P039Y/AGvO6xbC7f+9XftWlbbDcjKFcb +pQilkN9qtkdEH7TLayMAFOsgNvBlwF9+oj9w5PIk3veRTdBXI4GlHjhhzqGZKiRp +oP9HnycHHveyT+C33vuhQso5a3wcUNuvDVOixSqR4kvSt4UVWNK/KmEQmlWU1/m9 +ClIwrs8Q79q0xkGaSa0iuG60nvm7tZez9TFkxwIDAQABAoIBAHA5YkppQ7fJSm0D +wNDCHeyABNJWng23IuwZAOXVNxB1bjSOAv8yNgS4zaw/Hx5BnW8yi1lYZb+W0x2u +i5X7g91j0nkyEi5g88kJdFAGTsM5ok0BUwkHsEBjTUPIACanjGjya48lfBP0OGWK +LJU2Acbjda1aeUPFpPDXw/w6bieEthQwroq3DHCMnk6i9bsxgIOXeN04ij9XBmsH +KPCP2hAUnZSlx5febYfHK7/W95aJp22qa//eHS8cKQZCJ0+dQuZwLhlGosTFqLUm +qhPlt/b1EvPPY0cq5rtUc2W31L0YayVEHVOQx1fQIkH2VIUNbAS+bfVy+o6WCRk6 +s1XDhsECgYEA30tykVTN5LncY4eQIww2mW8v1j1EG6ngVShN3GuBTuXXaEOB8Duc +yT7yJt1ZhmaJwMk4agmZ1/f/ZXBtfLREGVzVvuwqRZ+LHbqIyhi0wQJA0aezPote +uTQnFn+IveHGtpQNDYGL/UgkexuCxbc2HOZG51JpunCK0TdtVfO/9OUCgYEA1TuS +2WAXzNudRG3xd/4OgtkLD9AvfSvyjw2LkwqCMb3A5UEqw7vubk/xgnRvqrAgJRWo +jndgRrRnikHCavDHBO0GAO/kzrFRfw+e+r4jcLl0Yadke8ndCc7VTnx4wQCrMi5H +7HEeRwaZONoj5PAPyA5X+N/gT0NNDA7KoQT45DsCgYBt+QWa6A5jaNpPNpPZfwlg +9e60cAYcLcUri6cVOOk9h1tYoW7cdy+XueWfGIMf+1460Z90MfhP8ncZaY6yzUGA +0EUBO+Tx10q3wIfgKNzU9hwgZZyU4CUtx668mOEqy4iHoVDwZu4gNyiobPsyDzKa +dxtSkDc8OHNV6RtzKpJOtQKBgFoRGcwbnLH5KYqX7eDDPRnj15pMU2LJx2DJVeU8 +ERY1kl7Dke6vWNzbg6WYzPoJ/unrJhFXNyFmXj213QsSvN3FyD1pFvp/R28mB/7d +hVa93vzImdb3wxe7d7n5NYBAag9+IP8sIJ/bl6i9619uTxwvgtUqqzKPuOGY9dnh +oce1AoGBAKZyZc/NVgqV2KgAnnYlcwNn7sRSkM8dcq0/gBMNuSZkfZSuEd4wwUzR +iFlYp23O2nHWggTkzimuBPtD7Kq4jBey3ZkyGye+sAdmnKkOjNILNbpIZlT6gK3z +fBaFmJGRJinKA+BJeH79WFpYN6SBZ/c3s5BusAbEU7kE5eInyazP +-----END RSA PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.der b/matrix-authentication-service/crates/keystore/tests/keys/rsa.pkcs8.der new file mode 100644 index 
0000000000000000000000000000000000000000..7863b1094f6f91de09a6ba9dd9279355e429ff43 GIT binary patch literal 1217 zcmV;y1U~yPf&{$+0RS)!1_>&LNQUrr!ay9qXGc{0)hbn0J;6V%=Ibf zz9S76SL)+*ce&zZA{3+mg`?x>|6Rqnfvm2a-pIxJ35AtCsbF1iMGClaFYYa%lyc^J zW0DIC7Re_spYvU{iWUoc^}{f+Vtqxfdv1x+l#bG%D~E>MJCExClyD`fGlBtJ`aezm zWAFyN?y+nx-}}|ywP{;yHzO!l8>I-Pkl$*yM?@dA%WES56zd>1@MXYXexg5cl~QuMJb8#VV2FOVYQ6 z6a)-ff4+E5FwFmWiirs%!t=9WNXy20|xKokHetE*VCm#Kow!CaM$VxAyfC z^Uq^PD(1UXb7i;Gy%=jHL>*I*$5+rILH1OI4QvFyZS`{cj+O}-IXMwzU(SRpNk#3h{_nW!2&?hr?WnbUb!?U7JrDn;l{R<4Gn>d{YWHx z8-m5RHXPVD;{_a0VPL#Vd=a6*%cVhg(>oZrpb-m$~Ji7=s`CUOLgA zbTRl(@^=|&jT;%=!ub>FXvDS2rPMUOlzl3lhBD#mGHr7W#ijZzc!+ z-Gx@Y-u%d!*7w5~yLY+yHGn{B4}Kv3EFhoTm#Dq#Uv5tvFM`!7t1^$c;h6Q=;i1R1 z0)c@5rgCM^%~lGP*r))Wb|rHIXYRyOkk1`*tv`Sh4Y?*{eUz>d-Y~&T(TG`Cr)|#K zan^za&LNQU&90UlJ+V8A7IX!0r z0tf&w3T_rVn$ zmZ%Cb7{4i&OHh5}fBDl_@YPM3yIV$Ut6r$V2rW`8vP*S!E*c>+QgGSc;s_n%)4gU` zXB)eoQ#+KMEH~7-IoThB3(QG1@IUL!D@_2(V`WpWnKb;|l-b|KmF+bQA0vn&#U2|- znH92{WCe6qdWCi+QXlgw2nnc9;{U@-`0d9yqxxHDYMdK~mAFR!n_NO?B!{INzlX&+ zhjTIWvsI<(>Z9iBPIfu`BZ9Zw+v|8sOgjLZ`vHf*0y*J{Ula>9x;0YGN2m%*;2}MI z4$N9oRNX!-*i6NYl8Pfy{v^8)Z0+u_<*+oDAPXdU7_f2+d%711NgQtbCQ`FbD~8dE zvsQqXK4cLi{n`IwQ1GWrHtjsBZjjVO=CfX=vBF#LfG^sPM_KNbMK9+Cuy;9N$B*^1 zA;hTsddP>%Ve&DUe$6=CUxMq~kO|V!*!*tWU?3X(%e{L1I^?j|H2uee_j?7?e=^D= zbhberh*jv>f$?s?BkSVz+>W3;m7{)Iz%~aBxRLO>->@0R`~GvYmMs~vcTuFD1VK9# z#n+z)uB?n)sdZpV?<%z1W>yUgcls1+?ppC-|F>(+cpyCb0||jYle&T! 
zLMT1O?*iHq=sLC*CP9$wAI*^EXr+ader3(#qyMG~d`?qCyhHy)i`Cg|8{Ow)6 zSQttXck&d!Is55}lF~c2o(W1J6;e6gkQp2ayVI{HX90>HJByXK*r^iD3kaVe81u`T z2p6CFuqytQUB-Pw*JZpJ?6Rx&N-_+r?}$msRg%d@R5J}zo$AlkXer-P-N90?fa+UV z>`G>vs)JCIdJRJbq3d1E|a)`!Q+#Ob>YkqG_Wh3MGU79Mv)m*e#>5)z?0`?4RmV%)Qp5y>0uE$jez76EMKFSZHNVoK zQP|%DKDMx9LY8Cs>?DBd?ssRiUE;+x}qC0y(z3BevKr^IO@jaG`Ex| zNye&CP65Xa=I0!{@+;h%M z8?FtiMw+N~JKFX74_u}T1Gf+d*a0{h9Uow3GV!!s)nIw%(eGfyp$s3i*H@m$U?zA9 zs`}-zJRI{5DMaBxd=v(>9I|d&m#o^_uF1&xd%JM0VdPtjqiIT3?hYRon>sS_d(tP# z-cH%ST4Gw(Gg^iHqlIo>kxsxwVq{+%dt&U0Z1iKS%vU=rCfN*0!cSE{0&bBNY>-F^ nc?Oy_&JQLNE`z9VO!!RvA+ZezXy|Mde=|=DrO; { + #[test] + fn $name() { + let bytes = include_bytes!(concat!("./keys/", $path)); + let key = PrivateKey::load(bytes).unwrap(); + assert!(matches!(key, PrivateKey::$kind(_)), "wrong key type"); + + let algs = key.possible_algs(); + assert_ne!(algs.len(), 0); + + for alg in algs { + let header = JsonWebSignatureHeader::new(alg.clone()); + let payload = "hello"; + let signer = key.signing_key_for_alg(alg).unwrap(); + let jwt = Jwt::sign(header, payload, &signer).unwrap(); + let verifier = key.verifying_key_for_alg(alg).unwrap(); + jwt.verify(&verifier).unwrap(); + } + } + }; +} + +/// Generate a test which loads an encrypted key, and then tries signing and +/// verifying a JWT for each available algorithm +macro_rules! 
enc_test { + ($name:ident, $kind:ident, $path:literal) => { + #[test] + fn $name() { + let bytes = include_bytes!(concat!("./keys/", $path)); + let key = PrivateKey::load_encrypted(bytes, PASSWORD).unwrap(); + assert!(matches!(key, PrivateKey::$kind(_)), "wrong key type"); + + let algs = key.possible_algs(); + assert_ne!(algs.len(), 0); + + for alg in algs { + let header = JsonWebSignatureHeader::new(alg.clone()); + let payload = "hello"; + let signer = key.signing_key_for_alg(alg).unwrap(); + let jwt = Jwt::sign(header, payload, &signer).unwrap(); + let verifier = key.verifying_key_for_alg(alg).unwrap(); + jwt.verify(&verifier).unwrap(); + } + } + }; +} + +/// Generate a PEM decoding and encoding test +macro_rules! pem_test { + ($name:ident, $path:literal) => { + #[test] + fn $name() { + let pem = include_str!(concat!("./keys/", $path, ".pem")); + let key = PrivateKey::load_pem(pem).unwrap(); + let pem2 = key.to_pem(pem_rfc7468::LineEnding::LF).unwrap(); + + assert_eq!(pem, pem2.as_str()); + } + }; +} + +/// Generate a DER decoding and encoding test +macro_rules! 
der_test { + ($name:ident, $path:literal) => { + #[test] + fn $name() { + let der = include_bytes!(concat!("./keys/", $path, ".der")); + let key = PrivateKey::load_der(der).unwrap(); + let der2 = key.to_der().unwrap(); + + assert_eq!(der, der2.as_slice()); + } + }; +} + +plain_test!(plain_rsa_pkcs1_pem, Rsa, "rsa.pkcs1.pem"); +plain_test!(plain_rsa_pkcs1_der, Rsa, "rsa.pkcs1.der"); +plain_test!(plain_rsa_pkcs8_pem, Rsa, "rsa.pkcs8.pem"); +plain_test!(plain_rsa_pkcs8_der, Rsa, "rsa.pkcs8.der"); +plain_test!(plain_ec_p256_sec1_pem, EcP256, "ec-p256.sec1.pem"); +plain_test!(plain_ec_p256_sec1_der, EcP256, "ec-p256.sec1.der"); +plain_test!(plain_ec_p256_pkcs8_pem, EcP256, "ec-p256.pkcs8.pem"); +plain_test!(plain_ec_p256_pkcs8_der, EcP256, "ec-p256.pkcs8.der"); +plain_test!(plain_ec_p384_sec1_pem, EcP384, "ec-p384.sec1.pem"); +plain_test!(plain_ec_p384_sec1_der, EcP384, "ec-p384.sec1.der"); +plain_test!(plain_ec_p384_pkcs8_pem, EcP384, "ec-p384.pkcs8.pem"); +plain_test!(plain_ec_p384_pkcs8_der, EcP384, "ec-p384.pkcs8.der"); +plain_test!(plain_ec_k256_sec1_pem, EcK256, "ec-k256.sec1.pem"); +plain_test!(plain_ec_k256_sec1_der, EcK256, "ec-k256.sec1.der"); +plain_test!(plain_ec_k256_pkcs8_pem, EcK256, "ec-k256.pkcs8.pem"); +plain_test!(plain_ec_k256_pkcs8_der, EcK256, "ec-k256.pkcs8.der"); + +enc_test!(enc_rsa_pkcs8_pem, Rsa, "rsa.pkcs8.encrypted.pem"); +enc_test!(enc_rsa_pkcs8_der, Rsa, "rsa.pkcs8.encrypted.der"); +enc_test!(enc_ec_p256_pkcs8_pem, EcP256, "ec-p256.pkcs8.encrypted.pem"); +enc_test!(enc_ec_p256_pkcs8_der, EcP256, "ec-p256.pkcs8.encrypted.der"); +enc_test!(enc_ec_p384_pkcs8_pem, EcP384, "ec-p384.pkcs8.encrypted.pem"); +enc_test!(enc_ec_p384_pkcs8_der, EcP384, "ec-p384.pkcs8.encrypted.der"); +enc_test!(enc_ec_k256_pkcs8_pem, EcK256, "ec-k256.pkcs8.encrypted.pem"); +enc_test!(enc_ec_k256_pkcs8_der, EcK256, "ec-k256.pkcs8.encrypted.der"); + +// Test PEM/DER serialization +pem_test!(serialize_rsa_pkcs1_pem, "rsa.pkcs1"); +der_test!(serialize_rsa_pkcs1_der, 
"rsa.pkcs1"); +pem_test!(serialize_ec_p256_sec1_pem, "ec-p256.sec1"); +der_test!(serialize_ec_p256_sec1_der, "ec-p256.sec1"); +pem_test!(serialize_ec_p384_sec1_pem, "ec-p384.sec1"); +der_test!(serialize_ec_p384_sec1_der, "ec-p384.sec1"); +pem_test!(serialize_ec_k256_sec1_pem, "ec-k256.sec1"); +der_test!(serialize_ec_k256_sec1_der, "ec-k256.sec1"); + +#[test] +fn load_encrypted_as_unencrypted_error() { + let pem = include_str!("./keys/rsa.pkcs8.encrypted.pem"); + assert!(PrivateKey::load_pem(pem).unwrap_err().is_encrypted()); + + let der = include_bytes!("./keys/rsa.pkcs8.encrypted.der"); + assert!(PrivateKey::load_der(der).unwrap_err().is_encrypted()); +} + +#[test] +fn load_unencrypted_as_encrypted_error() { + let pem = include_str!("./keys/rsa.pkcs8.pem"); + assert!( + PrivateKey::load_encrypted_pem(pem, PASSWORD) + .unwrap_err() + .is_unencrypted() + ); + + let der = include_bytes!("./keys/rsa.pkcs8.der"); + assert!( + PrivateKey::load_encrypted_der(der, PASSWORD) + .unwrap_err() + .is_unencrypted() + ); +} + +#[allow(clippy::similar_names)] +#[test] +fn generate_sign_and_verify() { + // Use a seeded RNG to keep the snapshot stable + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let rsa = PrivateKey::generate_rsa(&mut rng).expect("Failed to generate RSA key"); + insta::assert_snapshot!(&*rsa.to_pem(LineEnding::LF).unwrap()); + + let ec_p256 = PrivateKey::generate_ec_p256(&mut rng); + insta::assert_snapshot!(&*ec_p256.to_pem(LineEnding::LF).unwrap()); + + let ec_p384 = PrivateKey::generate_ec_p384(&mut rng); + insta::assert_snapshot!(&*ec_p384.to_pem(LineEnding::LF).unwrap()); + + let ec_k256 = PrivateKey::generate_ec_k256(&mut rng); + insta::assert_snapshot!(&*ec_k256.to_pem(LineEnding::LF).unwrap()); + + // Create a keystore out of the keys + let keyset = Keystore::new(JsonWebKeySet::new(vec![ + JsonWebKey::new(rsa), + JsonWebKey::new(ec_p256), + JsonWebKey::new(ec_p384), + JsonWebKey::new(ec_k256), + ])); + + // And extract the public JWKS + 
let jwks = keyset.public_jwks(); + insta::assert_yaml_snapshot!(jwks); + + // Try signing for each supported algorithm + for alg in [ + JsonWebSignatureAlg::Rs256, + JsonWebSignatureAlg::Rs384, + JsonWebSignatureAlg::Rs512, + JsonWebSignatureAlg::Ps256, + JsonWebSignatureAlg::Ps384, + JsonWebSignatureAlg::Ps512, + JsonWebSignatureAlg::Es256, + JsonWebSignatureAlg::Es384, + JsonWebSignatureAlg::Es256K, + ] { + // Find a matching key and sign with it + let key = keyset.signing_key_for_algorithm(&alg).unwrap(); + let signer = key.params().signing_key_for_alg(&alg).unwrap(); + let header = JsonWebSignatureHeader::new(alg.clone()); + let token = Jwt::sign_with_rng(&mut rng, header, "", &signer).unwrap(); + insta::assert_snapshot!(format!("jwt_{alg}"), token.as_str()); + + // Then try to verify from the public JWKS + token.verify_with_jwks(&jwks).unwrap(); + } +} diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-2.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-2.snap new file mode 100644 index 00000000..42a2f094 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-2.snap @@ -0,0 +1,10 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: "ec_p256.to_pem(LineEnding::LF).unwrap()" +--- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIH3L+ZYgfEaJtclP07qPQBrmkHEhYkyYooxvU8AlSW+CoAoGCCqGSM49 +AwEHoUQDQgAEXcA+X+lhDCmmzaUQFh7i7gkT7mwdrRUsMl9RSfyWh93n+xq3O4/m +vMmUnlvy7tBoHkcAdTJ+Zkv+loLw+mkcBA== +-----END EC PRIVATE KEY----- + diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-3.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-3.snap new file mode 100644 index 00000000..b71a5dca --- /dev/null +++ 
b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-3.snap @@ -0,0 +1,11 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: "ec_p384.to_pem(LineEnding::LF).unwrap()" +--- +-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDAl3R97SR8hWLuMH6737YdvXVb7P7T9pKSIhQozmzN+r+V5Ncvjn+DQ +Q/QxYr9nLwOgBwYFK4EEACKhZANiAASa86XQW7CDF9GhvcBY53sJ4lP0z9rfrjwo +nwixQJIWBROjlpsm5hdIwLfj46IUPYCNEoD8VP8eR2s/uzlgEv1hnz/2wmLlMqU0 +2R5QcY7Gc9CyDTO1bh5V2FFYjks4xfs= +-----END EC PRIVATE KEY----- + diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-4.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-4.snap new file mode 100644 index 00000000..850ca57c --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-4.snap @@ -0,0 +1,10 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: "ec_k256.to_pem(LineEnding::LF).unwrap()" +--- +-----BEGIN EC PRIVATE KEY----- +MHQCAQEEIBcikq9QkV39T8VFZWD4j5wO9xm0FWxhuAmvDRpix8XUoAcGBSuBBAAK +oUQDQgAEf4htTtPsdxlZn1htWE3ueHT4JB/4n4lxVOQdT/3RFuCS5aKQ04oS9pKM +QAHAn1bjbLRQ88Yxi3CgHuCitS+RrA== +-----END EC PRIVATE KEY----- + diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-5.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-5.snap new file mode 100644 index 00000000..a38f81bb --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify-5.snap @@ -0,0 +1,21 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: jwks +--- +keys: + - kty: RSA + n: 
vClyfM076hWBZonjThx_PX46UQUWb2LfOpUV1655ZGoKMKgqanLMMfLBPjW9ouY6UtrZ7BxEgl01xLZ1dLdD2Ggb2IpwW56PUuZD2w9hJMungjR0ImymFBwjA9j2ucr0eIHdVQoOakEsrB0dqEC-3R7ax7piGCj9YB6uGZbDVfIJUv40o1pb-hvmmyQHwpoU4jR1y_V-OhrdFMPtwCXov2nlrqDb_e-T7TQlu4FN0URI6VxLNcSkgZfJH50PdJPr7AHqtnWhOGBfLaC9jDpGxfbjmC1iSMSzOt6WyVdcnqHv_JpzXu0SzFqpUSm3OI_l2DUjwTJBL1TOIRTVsjQN1w + e: AQAB + - kty: EC + crv: P-256 + x: XcA-X-lhDCmmzaUQFh7i7gkT7mwdrRUsMl9RSfyWh90 + y: 5_satzuP5rzJlJ5b8u7QaB5HAHUyfmZL_paC8PppHAQ + - kty: EC + crv: P-384 + x: mvOl0FuwgxfRob3AWOd7CeJT9M_a3648KJ8IsUCSFgUTo5abJuYXSMC34-OiFD2A + y: jRKA_FT_HkdrP7s5YBL9YZ8_9sJi5TKlNNkeUHGOxnPQsg0ztW4eVdhRWI5LOMX7 + - kty: EC + crv: secp256k1 + x: f4htTtPsdxlZn1htWE3ueHT4JB_4n4lxVOQdT_3RFuA + y: kuWikNOKEvaSjEABwJ9W42y0UPPGMYtwoB7gorUvkaw + diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify.snap new file mode 100644 index 00000000..13523757 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__generate_sign_and_verify.snap @@ -0,0 +1,32 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: "rsa.to_pem(LineEnding::LF).unwrap()" +--- +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAvClyfM076hWBZonjThx/PX46UQUWb2LfOpUV1655ZGoKMKgq +anLMMfLBPjW9ouY6UtrZ7BxEgl01xLZ1dLdD2Ggb2IpwW56PUuZD2w9hJMungjR0 +ImymFBwjA9j2ucr0eIHdVQoOakEsrB0dqEC+3R7ax7piGCj9YB6uGZbDVfIJUv40 +o1pb+hvmmyQHwpoU4jR1y/V+OhrdFMPtwCXov2nlrqDb/e+T7TQlu4FN0URI6VxL +NcSkgZfJH50PdJPr7AHqtnWhOGBfLaC9jDpGxfbjmC1iSMSzOt6WyVdcnqHv/Jpz +Xu0SzFqpUSm3OI/l2DUjwTJBL1TOIRTVsjQN1wIDAQABAoIBAD4bcC7Bj4U0lAHE +gcLGpTkm8RjNFRqvB//ONed7L5Z1aeelmlqIid3ywtZF5tJeK+ItWsEUk4h0O8sy +bpKX600DEC3phCy5qZUzbDJiYS5ECoZvyeWS9z6sCuK6OrYYx5j/RI9NQnTQDm7B +LYjb1iQmixBh9P8EiJ2xgsXvfYpgSDv2nUBVywp8HUmadAqE89cBsyurvMpR6eY4 +HI0Wge+vIbh0KHj5h7sJ7C8mWBcnjkJ9hWSCumMth3wufEpfs4lCUjhojTkr6ioT 
+9Mok8VpZvsxzDw3Ubnut1lX7II2EYwNEfgm/Tcw4/NC2eLc1AjoshxLVLg5JWy85 +RGj7FAECgYEAyLTpdQjD0PrQBi6O5HWQnVMVvJQz6bihdKUrjwJOfWvqgKVpAVVQ +IbLUT3J6ooe4Tei6xc7viNA7JZ+KyRvaQuZT4nWW2AkOCOnaxH3NKI4cDpWsdNdC +jNBHnIUUvDU4c4CkgIc8f1Gezm9epDVsgb1+wxwSbB8Je4S7M8is1WUCgYEA7//M +f1UPl32yaXH7ag8DbWHMzeNRw5T+F3zTagp93mCM0mPYyjggMfwPFr7167ISWrG4 +6DfHHABqhjnAkKfrrFMN5Xm8oup60XXIox1FB4Ew4K0VzyMTnSMPMMEGJZx6VQJP +TlGpexs4t+1N/gWgcGv1OgZ+fw7hjcaFtTMAcIsCgYBY/W3tyF8aT7rgotAUl9Px +DZ+7p3KFFKnx0DJKDFnE3o5xT+YJA0yuRF5Cg53AZrDfSmtVvXUTklBHVD/y/3h8 +5ScYZ87kJs+kQBwRtz2515i1FsQKYk9NpCO8UH1rHaXUAJvvqQa35PamzVaHNHDR +7bVAyTeZ3QEVVTqzsSyAyQKBgQDDIcL+QIW10VBPJYqZ9+z8cUkYmVuwXqKdLDOk +gzVFqlC0+tOXzzx2B4+pGchPQ3Bi5x8FR9yOqtPxoGEt/CnE6Z5h1rIF8Am95jsk +TDcE3AiGlJQn30giOhAuvrIlwG1CUudyMTlQS5i5Cquf/qks3sn6zK7q1YmUQTYT +Vo1j8wKBgQCgSISkWgRrQhEdZKBBzcxkWyUXUMieZa+pPTooZGY1VAqyTSXHUlLi +mrWtJ4temNK6OE7xAWJMWL6cVAxQUrs8C11DS//IGMxlMF/7uMU6pOFmMqq52G3D +k3NkZSJmC1liFiu3Q6Q2UqGHRX8CS5MeLncReuNUnFOiX8X2fcjoRw== +-----END RSA PRIVATE KEY----- + diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256.snap new file mode 100644 index 00000000..562a72ca --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJFUzI1NiJ9.IiI.Yvudbc_oPln_H02H9woFZurQrgzsuWGnRK2kZzat_rp2HYFZtYobvMw9LqPDgeqq9a1HiL_Hx796SqyobiTXJg diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256K.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256K.snap new file mode 100644 index 00000000..9b937afd --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES256K.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: 
token.as_str() +--- +eyJhbGciOiJFUzI1NksifQ.IiI.4q4ua7R-we5m58rKtLQDHJmQJb15dEUhj7A_H5kh591mrScXFmCYXVQI5iKKXGFHBV_AFISrJF4YjWCHDnLPeQ diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES384.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES384.snap new file mode 100644 index 00000000..5297e8c2 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_ES384.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJFUzM4NCJ9.IiI.p9Otttjs3JOxZCeuIKwkql3YM-nfdxo__EVt84sex_PcokYjY47sa0qsvCqUUhpUoLSBihdchynuYqc5lOFuAM3Pi2pjg-ZrTqrzI23UlzFonlr4Zag9Qo3IYD10HKFq diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS256.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS256.snap new file mode 100644 index 00000000..1d7c18db --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS256.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJQUzI1NiJ9.IiI.HlJaOfS2PMi4mWzE8-0EgXt06-MeqzaLRy_04gs4HTS7FugbSJ0rJiwUwhss6O1KWT9TvDqo6AQBO_2hV1DKDiBIIh5Z6M92uC4MJNVLbAVQo6dSBt2DfSzioBI5MoDOBvgbIwSZAIFMqKTbYDa9rQ3XRAaClpqrIN-ACa3gz99ds5mYvUyiYsL5uuEBuWrp8DRk6WKjduhpOi4sMvylZbnfop1uHbvg6_dk5lzXt-1MKIW1QJW_63cFn7vdap5T9U4DBsEkCzYtuwgU-UCmsC8W07QEfcrJhHIlYoPQPHePKF0A4dVHKrgRmf1ik2p6e-VNw129JMvx0KO6v_JBww diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS384.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS384.snap new file mode 100644 index 00000000..058db0ed --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS384.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- 
+eyJhbGciOiJQUzM4NCJ9.IiI.AHBo1kZW72IQfs7NvjfrD_WpDa9avLdKyf_rqcCp0mJtGID60cgOG5RDTmI6K7TwzykW6l6LjEuYXDR8hemri6mQrtpQ6rMVTJwqJ6D8M92vH4b2gDBwSwbKz427bGdd_fnqm5K2ntwZGC7pceYg1zbcUQ6NJXs3vqKI6YSKustmm9yA1iMfugFG4eLAPrpfTtLmT1sSWYTYWHVT-6G5q7Bfk7Yu5aHiGDQTo427-Y9YF2fabIuDyCGG48UrBp0ajlm1MHKCBuOvK6NI5Jojd9IWDIf7tvAArsrKVR8QRvDXqqPInJEEZ5x7H1YEEZ5Qrh4XKVhRh9b3O-grDMxMng diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS512.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS512.snap new file mode 100644 index 00000000..1736ec04 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_PS512.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJQUzUxMiJ9.IiI.o_-UDhc26qYAstuaFJQUz8OKw3UeMc4N7b3U4qcLM-84dxRdw-wBw1rf94jX71vsrFQ2bEh6J5fc4_VtYgKBb2P6QvoL55c7Gqr-5JBw8BkoiiCzlvKIsi_j41FH5Gb4ZBE5Nf9vZD7DnD9BhYXadxaiksx20oNRKIKQ3oMiJxH1w0c-miCSoIR0jnS1QLlKoHYVb7wnkCiR2SOYQ42Je8B8REVzWm2GrqS2cRWnpi3nHihrapruL_BA161Ip1uH4lUFdZLXeG-R6pAlg1OJ_QXSZlP16nzT6MAW_-IFXfioR1QKT8AFNBodY8zlQCGglMyppZi5Y9i7YaMxFgnM4A diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS256.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS256.snap new file mode 100644 index 00000000..43d4230e --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS256.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJSUzI1NiJ9.IiI.H_iz_ry505dPMNxQIi4raU88i3wu7DS0Rre6Qm_LJz0Ee_gd5C_t92fBcrFkber1XL_p8AvlXx4DT2Zr_PMBL_2IblJ7t0Od5wnGC__twarj0v1t6KfUkLXcJ3Jy-StnHNbFTmdFnLuGGWIO7xG9h6xgKIvTroVoLJekMzYCc0wSFiyCfaow4yuKesQHUO-N9VDDPoYhkCPqbhVI_d0y6u7KmQy97FbCdCIxvPGHWrwxWcmYbTh4K9xhGDspDUUEubjYTg3t-oaMc2TJqWvu2FE8jyD02A8OCgca6bCU3NmV_Qr6LSUpFNsL4c-0sIp3-L9ndEWzGnN-ZeeGKur-FQ diff --git 
a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS384.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS384.snap new file mode 100644 index 00000000..1ba88566 --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS384.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJSUzM4NCJ9.IiI.bxKYTQQ4RlxOUvxbU_kuwJKGXXiuIPeRgO78a_3zHjvxIzDNKDvQK3w4DUVTlibR-iTVOASRSycWifBJZx_tsU7-BSqrBcjMtgP7mW-HwZ6pANO071iPkkiQU7gqMzbc2tz4uqGI0Z0izkX0_9dOOFSb7jKIUMzzW1O14fBPhZ4kPqkj07A9S9LW9lauQUTXrFgyEaT6G372cyNxi3-T55u9lkjjiiVN4TAhkaXUSN79IE2rNstU8DtKKs725WNUFy30f1-Ftc-J2uGEOsMZ9CQvEVPwOKbvFY2Uh1S8-FT4ahhwj1fxrmUwDH2lSmz6Rj5zf9-FF-IzivSVq4Z4ig diff --git a/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS512.snap b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS512.snap new file mode 100644 index 00000000..7312fefc --- /dev/null +++ b/matrix-authentication-service/crates/keystore/tests/snapshots/keystore__jwt_RS512.snap @@ -0,0 +1,5 @@ +--- +source: crates/keystore/tests/keystore.rs +expression: token.as_str() +--- +eyJhbGciOiJSUzUxMiJ9.IiI.nFsZ3X8GCgpPEojuEktc9a4C-YGQYx8XpbzhOkgnMVrw_wpqIQgWI--4r6BYV6TYAH8NBdQ8Dkdw6POh1Ni-vAtE2rAzjU19ySth5mfP7WEJXRxA1oEV3-dOqCgUI2JJEM13DuLlWFsUaOCbc1_kCkiziTcLtNap__EPGp5koRy-ZyVa1p_mQSQ4NlhJ3hZfHMGnQ0k3RWnpBn3AqERWllQllLniWGQ4l7rZStsD8PRr-rg7P7W7CRIyjrDqy_3bNJyKQCzs_oUrxO-Z7CU6-KfAeyM2U80TQvhZb-Z8_1dJ8e9WsfudQMLtHgO4tlD678Ywezjvr5ackZdn4QEEEA diff --git a/matrix-authentication-service/crates/listener/Cargo.toml b/matrix-authentication-service/crates/listener/Cargo.toml new file mode 100644 index 00000000..56f50154 --- /dev/null +++ b/matrix-authentication-service/crates/listener/Cargo.toml @@ -0,0 +1,45 @@ +# Copyright 2025 New Vector Ltd. 
+# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-listener" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +bytes.workspace = true +futures-util.workspace = true +http-body.workspace = true +hyper-util.workspace = true +hyper.workspace = true +pin-project-lite.workspace = true +socket2.workspace = true +thiserror.workspace = true +tokio-rustls.workspace = true +tokio-util.workspace = true +tokio.workspace = true +tower-http.workspace = true +tower.workspace = true +tracing.workspace = true + +mas-context.workspace = true + +[dev-dependencies] +anyhow.workspace = true +tokio-test.workspace = true +tokio.workspace = true +tracing-subscriber.workspace = true + +[[example]] +name = "demo" +path = "examples/demo/main.rs" diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/ca-key.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/ca-key.pem new file mode 100644 index 00000000..34cbcf2a --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/ca-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEA58uTeW5C7RkjeO+SeBsBhMzkyTrLwLtyVOSTY85bGxiy6UST +1jFpPqdn+BZJEj/mM9QJ7MXxtYJHtXpEeAehjn0hU9n2ozq09BlqlXUvIV4Zuehu +lRdWEGIb5VkruGXeG4SRu9Iiw87x0w8AZ+deK9T1ZK9OeTrwPdfQtDOvazDdGpZB +RGyGKljuI1LGAXSwEaw9t1rMEqubNAJLZnpJxMcSfxHibN5kSaoqnbQ+PxltCwnJ +DNGtNIIzp4Q1gG8fanUiDlIdtEbrsmtwIgbWKZncqHURZJYIZZ785qExku4ydaDy +11a/VmQ5PtJ7Bwoxdq45gKXcHH/RaamrYpFy+wIDAQABAoIBAFiu6KOC7hQslAfH +ETDmlDQs+DONTTtV/C5Cral34B+D2Z+p3y4KBYw1HHAshpR24ipeK9Xs/cdGKw1h +1CRgNw1/Dms4b0aQRX9G4iKAjSGlEQ6xwO1F2mW2Q5oB/do1Dz9T/zXu9eIYoSjx +CmS2fq8icSiuccWsKRJgKbdoNus5MjBfcE79QdzxvCm1jo/SCf9gqaIBK3FogFcl 
+GMQJulpSopx0o7/jXkbrHbJ2liaxVXEBzqWQbTVmxRUz2xGEakc/uKdiQ7LOYHbm +3hzRqc5kYW87IVq2Nb1y956/1Nuld+DdhF8hCNlcZnnrHq/CVPsOIJ1KlZTlXZML +yJGBkqECgYEA/cr1kVy207tznYhVA2Xw/WH5V2pPTP9gVKxgf1vO1q6Iv1px6VS+ +A80oTqxtqzH/9zAD442P1zEIE4TJdLfPab/OjxAtlOCfxj965DApspFu9+/Te0fo +EFbuD9hXB5iG8XX69eKbs0uJnRdEtb3vibzHEPLg7SWFaApRI6etTukCgYEA6c+k +RiXZ1LcvYr5hlrD9XA1WD95NK9wZvX9t2cCFcdx3aIgYoU4f8kdxakVmv9avxHJr +2tgxnW7INHlMgU5BNcMNU47trS7dxqszzF8mznG8bOc5DXyDVA0M4oB2B3lzU6Nm +JNllGfGvLeK5PNqAW4GYTSwHbljmlFS79Ptf7EMCgYEAy1p0qaTAWac5XGCAvdhQ +4LZAM+ra37dAWJhGOcY2VY5DxA+UdoGQPzuDsIY42ZOWpVmzxAEJ4ENJVVpwkTU2 +3GTz/W3ZGBFj9FWpAm4U+x/M6p0ftwhGydDdr5SJJ2zvs0n1bE/GskM0YMrkIzut +U5APcWUrFNmbq2GY4hjYpQECgYEA4wzq/9vd7z183Kz4Y7e4Md4ZhwtfcYopzOWk +LWNRs0JfCrmvAWW2jDZoosSGhSDcSy66Iijz9WgRLzPj4WW22ZhypoQTtqveXgD/ +KiX0r2GvkynvM3OIrOSHcKVC+PstzTjOBla+YTVb4nlbXQbqwvHUjoyFItleAQlQ +BRTfD7UCgYEAsmy5/tW1+X954cR64kvBDDv47KbYJZK8vb1veJOBdAGdK8+Cbv/9 +sXDDML8wia66Pvn3gOZZszrbabqqYEC2BV7i56etjxrAYoLzoeF1WXKUBC+jWkfp +psaszCgX5xCf/GFpnLd4e0rZmQBQzNeL/RzrkRuvNGjx/VtSZ3amhTw= +-----END RSA PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/ca.csr b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.csr new file mode 100644 index 00000000..cb32c639 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.csr @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICujCCAaICAQAwdTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDVNhbiBGcmFuY2lz +Y28xCzAJBgNVBAcTAkNBMRgwFgYDVQQKEw9NeSBDb21wYW55IE5hbWUxEzARBgNV +BAsTCk9yZyBVbml0IDExEjAQBgNVBAMTCU15IG93biBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAOfLk3luQu0ZI3jvkngbAYTM5Mk6y8C7clTkk2PO +WxsYsulEk9YxaT6nZ/gWSRI/5jPUCezF8bWCR7V6RHgHoY59IVPZ9qM6tPQZapV1 +LyFeGbnobpUXVhBiG+VZK7hl3huEkbvSIsPO8dMPAGfnXivU9WSvTnk68D3X0LQz +r2sw3RqWQURshipY7iNSxgF0sBGsPbdazBKrmzQCS2Z6ScTHEn8R4mzeZEmqKp20 +Pj8ZbQsJyQzRrTSCM6eENYBvH2p1Ig5SHbRG67JrcCIG1imZ3Kh1EWSWCGWe/Oah 
+MZLuMnWg8tdWv1ZkOT7SewcKMXauOYCl3Bx/0Wmpq2KRcvsCAwEAAaAAMA0GCSqG +SIb3DQEBCwUAA4IBAQBiCczhqMP1h0ArkBemwQXDCAlFm0wvAzBfPnnUobZwktu5 +1H1MSIc8MSIPbU8Z+skVTJ7R8wHr+qV712v6CcSuC+CZqqdh4slXNNIe7VK/orzl +wJ342uAj9wUWhFlR7/5JhalsfCHtpt8M8Fi1Xt5wKQwuYnH377hKOfiI/30iyNAl +gfxLm+NFEVywAbtCuFYsBIkd9tIxHObdMiQEJaAfFXYgVUaBgAFgheXkgefRLmcy +/uVUAI38LENiVZhoKuY1Gbs2nH+W5ea4VEHc7CJjRWoNJ9XIubsxPYIHuowS7phK +ThfK14BqpyvNgvCDIDELNZ9a6GW9TZz7P8/ZmYwa +-----END CERTIFICATE REQUEST----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/ca.json b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.json new file mode 100644 index 00000000..1e8818a1 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.json @@ -0,0 +1,16 @@ +{ + "CN": "My own CA", + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "L": "CA", + "O": "My Company Name", + "ST": "San Francisco", + "OU": "Org Unit 1" + } + ] +} diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/ca.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.pem new file mode 100644 index 00000000..33298e03 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/ca.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgIUZJIz+zgaa4BBKcNcHNu03FOKS/cwDQYJKoZIhvcNAQEL +BQAwdTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDVNhbiBGcmFuY2lzY28xCzAJBgNV +BAcTAkNBMRgwFgYDVQQKEw9NeSBDb21wYW55IE5hbWUxEzARBgNVBAsTCk9yZyBV +bml0IDExEjAQBgNVBAMTCU15IG93biBDQTAeFw0yMjEwMTIxMzI4MDBaFw0yNzEw +MTExMzI4MDBaMHUxCzAJBgNVBAYTAlVTMRYwFAYDVQQIEw1TYW4gRnJhbmNpc2Nv +MQswCQYDVQQHEwJDQTEYMBYGA1UEChMPTXkgQ29tcGFueSBOYW1lMRMwEQYDVQQL +EwpPcmcgVW5pdCAxMRIwEAYDVQQDEwlNeSBvd24gQ0EwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDny5N5bkLtGSN475J4GwGEzOTJOsvAu3JU5JNjzlsb +GLLpRJPWMWk+p2f4FkkSP+Yz1AnsxfG1gke1ekR4B6GOfSFT2fajOrT0GWqVdS8h +Xhm56G6VF1YQYhvlWSu4Zd4bhJG70iLDzvHTDwBn514r1PVkr055OvA919C0M69r 
+MN0alkFEbIYqWO4jUsYBdLARrD23WswSq5s0AktmeknExxJ/EeJs3mRJqiqdtD4/ +GW0LCckM0a00gjOnhDWAbx9qdSIOUh20Ruuya3AiBtYpmdyodRFklghlnvzmoTGS +7jJ1oPLXVr9WZDk+0nsHCjF2rjmApdwcf9FpqatikXL7AgMBAAGjQjBAMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTjbc5p6QbiplBV +VxQ2gmUJ+VTciDANBgkqhkiG9w0BAQsFAAOCAQEAFRsqSDiq5+Yvt8DC/5h5Ykgv +l41W8VQK1xlc2DKIfZ/Rnf1PP4kxxv0KyFtPAUuDeuJSJqaHsC4l9itLWMhM1M7K +g5qlrYP128C+KdC3cSkP8XttzVkhF/ffLWLPENRgRV2DldRW8G/omVbBeXdIKbK5 +AYGEkliVK+zilNYax9VapgBdsAZEu/8O93/zWxVh1THa1PUvgLVy+xRNxhT3NenF +T/AMRPoRCyy3M0CsBC/k0uqtCGBB6n6HLj0kTG8cY1KiVu3aB+P8yUikxNMpYNgw +l2/J0nlPbsRiYPprT1PDcMEUto+ehGcrWZ6nSzbBEvRLeMvJhcJMNnKNKCsS8w== +-----END CERTIFICATE----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/client-key.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/client-key.pem new file mode 100644 index 00000000..dae5e5f0 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/client-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAzZAP8nRL/+enJwbTKDm6gyd6tmU6o+8YWUMJdriXmPhOlePA +nrWWKbFlVhZhCkjoOWR4K94rCFMZ2rjsxIEBBO46SjKZ2D5La6hSOmXxbCevZRTC +c+8rRX7fwF8zsvTdaSgCAW8OKlqMWIwm/2d6tvzakEwZsk8gJdqjww+VYIxvmbFU +dCe4I20PrfkARa/BH7ZkFGCyQgn7riaZmRUIqKcBWd5uEMhHTOySXXRrmS7vfyZ0 +/X01U0nBqhxUgtDhKc1hll3lQ2BjvkU2dbt9+mWD4XZDsWenbMU+9sAX9rQTgqzD +YgJGotUK68n/XqNuDNEyOubdDyLICLD92ItnDQIDAQABAoIBADmrWu34NoIaqUhH +n+G/IFY/MywMhkELiNcx+Wu3KcCemN1wQc/EvdYAkJ9wM9VA0vWW/CfCcmwpdC1q +h/IxBuotM2kxfPuvrlULqdX8V5iyIYDILC1+QbODfp5nlwdzrtIbiUSBtYWoVYtZ +9m7cxw6jLWYiE2t0y14TUrIcoxmsiymmAemt1/8EuxU6ZIw4TlMAPcYxzy36iDJK +9er7iUeTl7GY/gojmD//tO92qjbOzTboAvL5NaYmAVTJTJg6z8c59884KgIx68gQ +R8gBmFJSNLm/+n9jkSmfABghJr18f2+Ys0d/d1ckITbzIbOUNhmtuYjD/t0UPdD5 +cMUVWvkCgYEA/KtMrhle9rFz1ttqg449SxgYfgyXCxxDwdQYoidF5EgkgLrACcY+ +eheCKaTiwGG7oT9j4Uak38sSrAYy3E3s82bhhzOd+CoSaRumW9VQ3WVaBSF80HLi +3gykSTP4QMzGGa6jSsXfPriugX9cF4tNfNbSB28GjAh4fqsakdPB7XMCgYEA0EXK 
+GoKNsjNj/KxGHX+LtgEtGzZwDJH+KzFP0ow3SYmgyFbt1MdFdX7SWZVnSi0a3MaG +GEDo3eGcGAYtHoRf7rxMFC4eZRZ3FPqd9w4BFN+j8cJ/q6vuA4grvakkz3gUZG0j +sOoSK/DJGrAQxnRgshxL0Fd0DSzUEqgW4o6oOX8CgYEAhSEw5u7BRZRcZ9H2flic +3QtWJFw33YfH/8HkNNQilFSavyUm+D93PddTIuQZAaq9NQn0c4dIag5SyUb+12tL +tTf5DsbYriBk0PLbpblwwSac1uU9IYvXE45vpY53eJUsr+1/Zm954E9oyxyzBkjE +zElYIsiSF4iDDKLU/g8oOBcCgYBTCiBkpXz9egP5sG5cQIhhzuI/IVtXh7YBXq3m +0sUQavFSL2awGauWBzSzRyBhsM4vDHBWpzqxjMyBv6SpsDnXo/fpa+HuiCB+mtX0 +tP61Zd2l/NiOiARkIBzgh9oHZmcrC2DZntoT7vMf0uc9WRVcrm+D5/p7bk44ChDl +z98+3QKBgQDUoPGFF4j/pQPJztTDr3aXMUrHw7jTc7zilcYAkINijLhjgEEasQHU +AnQLRhRt7W8M++9Jjv42rvXVi/0lZs/bv1znnNde2w40W4rBPcairbVSiv+nug81 +fD1DeBwtqnApSAurg6LOoMvcC1XmwJpgNqOgwtirN1df/fLMwltm7A== +-----END RSA PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/client.csr b/matrix-authentication-service/crates/listener/examples/demo/certs/client.csr new file mode 100644 index 00000000..780d49d1 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/client.csr @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICtTCCAZ0CAQAwQzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQH +Ew1TYW4gRnJhbmNpc2NvMQ8wDQYDVQQDEwZjbGllbnQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDNkA/ydEv/56cnBtMoObqDJ3q2ZTqj7xhZQwl2uJeY ++E6V48CetZYpsWVWFmEKSOg5ZHgr3isIUxnauOzEgQEE7jpKMpnYPktrqFI6ZfFs +J69lFMJz7ytFft/AXzOy9N1pKAIBbw4qWoxYjCb/Z3q2/NqQTBmyTyAl2qPDD5Vg +jG+ZsVR0J7gjbQ+t+QBFr8EftmQUYLJCCfuuJpmZFQiopwFZ3m4QyEdM7JJddGuZ +Lu9/JnT9fTVTScGqHFSC0OEpzWGWXeVDYGO+RTZ1u336ZYPhdkOxZ6dsxT72wBf2 +tBOCrMNiAkai1Qrryf9eo24M0TI65t0PIsgIsP3Yi2cNAgMBAAGgLTArBgkqhkiG +9w0BCQ4xHjAcMBoGA1UdEQQTMBGCCWxvY2FsaG9zdIcEfwAAATANBgkqhkiG9w0B +AQsFAAOCAQEAZi9gWV6e5cYNRpznUh82ASNHhF2FhA7wwjyK1I+4uJ47ZEPnle1G +j4x+7DWveX6b6DdMxzJdu4mXlYbAxqeCqBkBRS5tq03ZbioAuzjo4987jO5XO1SO +X+1VRIWWEP71Nov4v/2izZeH3XA1yGsb64ThVWeeytdMll/Ih93T9xb+O9i5ppuj +I/KtQodDPJpRZ1fQm7fCekt3dZxw/o57NmtcDk0/VaKqfajk+/Lxz5s2j+Ic+882 
+3XvXqnDpo3IxKhOXag/vuBlYh8stZr/NTlblN1kVvBr5hwFnQPjO4cYs8WDpGy4R +LfKf3YyAGNwHDX43RGjUxmMfIgcDuvzWTg== +-----END CERTIFICATE REQUEST----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/client.json b/matrix-authentication-service/crates/listener/examples/demo/certs/client.json new file mode 100644 index 00000000..bc2d3d5e --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/client.json @@ -0,0 +1,18 @@ +{ + "CN": "client", + "hosts": [ + "localhost", + "127.0.0.1" + ], + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "ST": "CA", + "L": "San Francisco" + } + ] +} diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/client.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/client.pem new file mode 100644 index 00000000..0fae8530 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/client.pem @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID2TCCAsGgAwIBAgIUPlKsaVgzM0KLAHeCoQElYYBk9rIwDQYJKoZIhvcNAQEL +BQAwdTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDVNhbiBGcmFuY2lzY28xCzAJBgNV +BAcTAkNBMRgwFgYDVQQKEw9NeSBDb21wYW55IE5hbWUxEzARBgNVBAsTCk9yZyBV +bml0IDExEjAQBgNVBAMTCU15IG93biBDQTAeFw0yMjEwMTIxMzI4MDBaFw0yNzEw +MTExMzI4MDBaMEMxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMN +U2FuIEZyYW5jaXNjbzEPMA0GA1UEAxMGY2xpZW50MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAzZAP8nRL/+enJwbTKDm6gyd6tmU6o+8YWUMJdriXmPhO +lePAnrWWKbFlVhZhCkjoOWR4K94rCFMZ2rjsxIEBBO46SjKZ2D5La6hSOmXxbCev +ZRTCc+8rRX7fwF8zsvTdaSgCAW8OKlqMWIwm/2d6tvzakEwZsk8gJdqjww+VYIxv +mbFUdCe4I20PrfkARa/BH7ZkFGCyQgn7riaZmRUIqKcBWd5uEMhHTOySXXRrmS7v +fyZ0/X01U0nBqhxUgtDhKc1hll3lQ2BjvkU2dbt9+mWD4XZDsWenbMU+9sAX9rQT +gqzDYgJGotUK68n/XqNuDNEyOubdDyLICLD92ItnDQIDAQABo4GSMIGPMA4GA1Ud +DwEB/wQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0G +A1UdDgQWBBQTZnmyh9yldA1I/p45TvZTJwYeGTAfBgNVHSMEGDAWgBTjbc5p6Qbi 
+plBVVxQ2gmUJ+VTciDAaBgNVHREEEzARgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZI +hvcNAQELBQADggEBAF9wkW1bVCi4HW+3IQR8eVhfwAr6PILhwdVvW7iJyXv8c/oa +NP5SQeunvRXYZqUvplWCRF6GDfN2OXe/RXCKCevvHyU1kihoYEndMx2ETqJiNJEf +kXMdhHLqu9lx2pZ8uPJjsXbhT4T//fCtWhUZjsSKDa2Paa72jTzGbGwkD6lY3Fz6 +KOAPeKiRecoY55w/NlXnVoqPhJ0qSIWl7F0PrgUPWFoOaRev6q9U/zDLWLnaWVWS +iA3eNSZSISm9vPqodt+FRJhTU8CYkY20fqBlfXRrnTeKS/Ydr6axNXRQxIjazs77 +/XMw/YTeYzzimRkfUpQzBbe1wOL7yKA6IdaYhrs= +-----END CERTIFICATE----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/config.json b/matrix-authentication-service/crates/listener/examples/demo/certs/config.json new file mode 100644 index 00000000..c1b7aa80 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/config.json @@ -0,0 +1,25 @@ +{ + "signing": { + "default": { + "expiry": "43800h" + }, + "profiles": { + "server": { + "expiry": "43800h", + "usages": [ + "signing", + "key encipherment", + "server auth" + ] + }, + "client": { + "expiry": "43800h", + "usages": [ + "signing", + "key encipherment", + "client auth" + ] + } + } + } +} diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/gen.sh b/matrix-authentication-service/crates/listener/examples/demo/certs/gen.sh new file mode 100644 index 00000000..019279ec --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/gen.sh @@ -0,0 +1,15 @@ +#!/bin/sh +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +# Script to regenerate the server and client certificate + +set -eux + +cd "$(dirname "$0")" +rm -f ./*.pem ./*.csr +cfssl gencert -config=config.json -initca ca.json | cfssljson -bare ca +cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=config.json -profile=server server.json | cfssljson -bare server +cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=config.json -profile=client client.json | cfssljson -bare client diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/server-key.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/server-key.pem new file mode 100644 index 00000000..c13612d8 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/server-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAyZLpvV/upF9rm5WgWsWuXJDmST615NKEJ8PgahH7ix9G9LrG +h9ZDsAcrDbjOuIMx9SN4TH8ZOBVqG2RFOsvhyaYgRoy1ofq6Vgkay35iLVvw6cKH +qQ/T6Lt1ku8j1kS7y2PF3na/yXRAoCroXV8wKusKeXJmNc/cr0paucOYwOOhPsvh +4OA0BZ9yhosuAHwJ+HShujrk9UK7qUOj+xUBQpZBlun9bu5vwI1r6uLH+EwgoJ4V +iSj9Mio8fyc7GyHp7qreUmW59xKWoupsWrlfS5cnSGRiDMdOkoui9p2kO6o119aD +liHlAWkKK+hGHJIlMDAsH0jZT7y/KmYSAo/X6QIDAQABAoIBAHDTrceVSdNxoZ7N +ipskaStg47V9x3xUJSrI5fUZKa4+jI3xeayQzwRZjsy4c+Utciofd3eB8NDGk8TP +RDzb3/7p4Mj8e7I10FTV9cyPak6vVtLRUvPbayaqvu3Gs0183YzDxP53g3Q0gPPl +8HhLDoAHXa6KzREzzvfC67Ns+zSDdi1AOjZiOplgirG7t4qOKpRdJ1c+1e9l4ifF +838Qh4ZdrDiYYsM1ixyWMaBKTeLjn8GAllFm3a4Ayjwf0ooiNMm3BmeqHBLRt7oG +faGoEJjCYsYUiWYwAYdnvJjk0lNGqdmEvr3YwcccncNIhsqW1vyNSLq/rrAG0uDZ +O9Z9UYECgYEA9Xwr6y3nxnb8ygv4bFI4DJpAY7Mz7mrNNd+umfSbdPGTPqWU3iH+ +FwV1DOcxFO620iF40y85pfdNrnIyZir5/s9B+wLufC8yHvdPuGDENP1o3K5EvJIj +7pivcsSdAa8/N7f5f1aRmqLgudaFvBqqVkGe5TkDee5sHQkjBKU7W/kCgYEA0jU9 +iGAaJjIVL2rONM59AwYmCSPXT6hHxfdOUm+vhjmKfffv5ounhYMm1/ApRXafO5q6 +4IKHXxFkCbMyIM3QwwtlZdJI+zYbZFH2FauaVB+AF5Wc+w3NaN757K3LfZTj4kyQ +l0dSCwR4L9djp7jTuDamEIc0QfsZ6fbNe+xX93ECgYAT49GzJm8HF5D31ex06lx8 +OOtKqLRmduTVnqAI/VazLPefNc9QCDUMLHcFap4Bci4B7JBbnBHxro3uunX27TiA 
+Os6/xccI7NIEzEj7SWvcV0PtzXjoRnb+2AQvKlsGTeqzWwauGJeHjfbjV8xSJ17x +yjNTo0Dy2iyMVbcuoyyiEQKBgQDLz/E8ZCmWdSLTWdRboQXWw8RnQkgGJRyKFpHr +HfzqwKnGH3qMZ0XjDtm/r0zk2/HiAdFF02lbxOng+c0Vv1i1dDw5MF2wrLJ8X3eh +ZUP6Ypx4wYh2ZtiN4Pwj/hJ6Tb1yclgTRYSHyCqcAFPQkEU/rETxa5ZAjy1+Ct0L +VYmpEQKBgQC3fwjnDk6CmT9p140J7PBybJ/yTl266y6j35JDqGqEupc7DNb0C8dc +3IbOF+yWd9nxJ66URBPen83wn5864hmTeU9rmDRrh2jkzxKuyOFbPBtM1Q7Uy6it +HpDDA/ky4m2sVyuv4TuE93WkDQbXlxtYc0wnCWx0mkPIYkAxmUO4Rg== +-----END RSA PRIVATE KEY----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/server.csr b/matrix-authentication-service/crates/listener/examples/demo/certs/server.csr new file mode 100644 index 00000000..ab7a923c --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/server.csr @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICuDCCAaACAQAwRjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQH +Ew1TYW4gRnJhbmNpc2NvMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDJkum9X+6kX2ublaBaxa5ckOZJPrXk0oQnw+Bq +EfuLH0b0usaH1kOwBysNuM64gzH1I3hMfxk4FWobZEU6y+HJpiBGjLWh+rpWCRrL +fmItW/DpwoepD9Pou3WS7yPWRLvLY8Xedr/JdECgKuhdXzAq6wp5cmY1z9yvSlq5 +w5jA46E+y+Hg4DQFn3KGiy4AfAn4dKG6OuT1QrupQ6P7FQFClkGW6f1u7m/AjWvq +4sf4TCCgnhWJKP0yKjx/JzsbIenuqt5SZbn3Epai6mxauV9LlydIZGIMx06Si6L2 +naQ7qjXX1oOWIeUBaQor6EYckiUwMCwfSNlPvL8qZhICj9fpAgMBAAGgLTArBgkq +hkiG9w0BCQ4xHjAcMBoGA1UdEQQTMBGCCWxvY2FsaG9zdIcEfwAAATANBgkqhkiG +9w0BAQsFAAOCAQEAoDYizxvrx9zCwJwVkoyTesNpv/TEXSyUJUA0obAwCmRxYfAI +8/C3OglQwlrMKTgeBsfzBnLHgdZ4mKmuQpRNGrt+MncN09x7IqT4zbijWBJu6VbI +a7B+BElzrt/rsEo/h2ZKy1P42XIW/icADRFoCDqhOG3kYQ5unIoNawN/4okJDxg6 +z+M5FSifRee3QSc9UOHIGNTuVS07Gxmhoi+c9samuxZYqxR1j46LGY4OOWEW8RVB +ZhybsfhXgzkoAvIjCJiNqJGsNmMlr6Psq1cKCTaM17RlxlqSAtlQ2igk1ptAo7Xo +q+EVnJHmkWbjksQKykOia91eOOlGArZfSGBgYw== +-----END CERTIFICATE REQUEST----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/server.json 
b/matrix-authentication-service/crates/listener/examples/demo/certs/server.json new file mode 100644 index 00000000..166acfa9 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/server.json @@ -0,0 +1,18 @@ +{ + "CN": "localhost", + "hosts": [ + "localhost", + "127.0.0.1" + ], + "key": { + "algo": "rsa", + "size": 2048 + }, + "names": [ + { + "C": "US", + "ST": "CA", + "L": "San Francisco" + } + ] +} diff --git a/matrix-authentication-service/crates/listener/examples/demo/certs/server.pem b/matrix-authentication-service/crates/listener/examples/demo/certs/server.pem new file mode 100644 index 00000000..ac509a48 --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/certs/server.pem @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID3DCCAsSgAwIBAgIUXV73OL40WuMFPhEf1BT5I9wWilQwDQYJKoZIhvcNAQEL +BQAwdTELMAkGA1UEBhMCVVMxFjAUBgNVBAgTDVNhbiBGcmFuY2lzY28xCzAJBgNV +BAcTAkNBMRgwFgYDVQQKEw9NeSBDb21wYW55IE5hbWUxEzARBgNVBAsTCk9yZyBV +bml0IDExEjAQBgNVBAMTCU15IG93biBDQTAeFw0yMjEwMTIxMzI4MDBaFw0yNzEw +MTExMzI4MDBaMEYxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMN +U2FuIEZyYW5jaXNjbzESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEAyZLpvV/upF9rm5WgWsWuXJDmST615NKEJ8PgahH7 +ix9G9LrGh9ZDsAcrDbjOuIMx9SN4TH8ZOBVqG2RFOsvhyaYgRoy1ofq6Vgkay35i +LVvw6cKHqQ/T6Lt1ku8j1kS7y2PF3na/yXRAoCroXV8wKusKeXJmNc/cr0paucOY +wOOhPsvh4OA0BZ9yhosuAHwJ+HShujrk9UK7qUOj+xUBQpZBlun9bu5vwI1r6uLH ++EwgoJ4ViSj9Mio8fyc7GyHp7qreUmW59xKWoupsWrlfS5cnSGRiDMdOkoui9p2k +O6o119aDliHlAWkKK+hGHJIlMDAsH0jZT7y/KmYSAo/X6QIDAQABo4GSMIGPMA4G +A1UdDwEB/wQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDATAMBgNVHRMBAf8EAjAA +MB0GA1UdDgQWBBQ5FqZm6QZH0ryYjHsPfPLLco+hHTAfBgNVHSMEGDAWgBTjbc5p +6QbiplBVVxQ2gmUJ+VTciDAaBgNVHREEEzARgglsb2NhbGhvc3SHBH8AAAEwDQYJ +KoZIhvcNAQELBQADggEBAAESeOqIzNByls+z+Ah8i5Ge4MfkomD2dHipvJNOKtY4 +JUxffHslgid6O4zE5uw4mLnM4tvaUhsO1DwyfqQ0dj0JAx0xOSZuPfXag1fHxJ4Q +YJImrP13Hcm18Jr/ie5En6v25Uq0DR5NqbqSBXdIwQB84yAV23555YU9sqJhDh4g 
+wTugRTcNefTIO4lD0eFu4PLGyt7J6KNdur9n4RrTJzIoJx7cK+vrAAHhQKzetLQm +VnHs2U7ckgNLEjxo/9qziQ5bPXb4MnsrZgN00oeDwEMfIkANmSPUu/6Ei31SXfmE +s5ukdV0z+OX59/vEsG3IPiZpZG/dOjBeFjPvG/7EOoc= +-----END CERTIFICATE----- diff --git a/matrix-authentication-service/crates/listener/examples/demo/main.rs b/matrix-authentication-service/crates/listener/examples/demo/main.rs new file mode 100644 index 00000000..f418831d --- /dev/null +++ b/matrix-authentication-service/crates/listener/examples/demo/main.rs @@ -0,0 +1,106 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + convert::Infallible, + net::{Ipv4Addr, TcpListener}, + sync::Arc, + time::Duration, +}; + +use anyhow::Context; +use hyper::{Request, Response}; +use mas_listener::{ConnectionInfo, server::Server}; +use tokio_rustls::rustls::{ + RootCertStore, ServerConfig, + pki_types::{CertificateDer, PrivateKeyDer, PrivatePkcs1KeyDer, pem::PemObject}, + server::WebPkiClientVerifier, +}; +use tokio_util::sync::CancellationToken; +use tower::service_fn; + +static CA_CERT_PEM: &[u8] = include_bytes!("./certs/ca.pem"); +static SERVER_CERT_PEM: &[u8] = include_bytes!("./certs/server.pem"); +static SERVER_KEY_PEM: &[u8] = include_bytes!("./certs/server-key.pem"); + +async fn handler(req: Request) -> Result, Infallible> { + tracing::info!("Handling request"); + tokio::time::sleep(Duration::from_secs(3)).await; + let info = req.extensions().get::().unwrap(); + let body = format!("{info:?}"); + Ok(Response::new(body)) +} + +#[tokio::main] +async fn main() -> Result<(), anyhow::Error> { + tracing_subscriber::fmt::init(); + + let tls_config = load_tls_config()?; + + let listener = TcpListener::bind((Ipv4Addr::LOCALHOST, 3000))?; + let proxy_protocol_listener = TcpListener::bind((Ipv4Addr::LOCALHOST, 3001))?; + let tls_listener = 
TcpListener::bind((Ipv4Addr::LOCALHOST, 3002))?; + let tls_proxy_protocol_listener = TcpListener::bind((Ipv4Addr::LOCALHOST, 3003))?; + + let servers = vec![ + Server::try_new(listener, service_fn(handler))?, + Server::try_new(proxy_protocol_listener, service_fn(handler))?.with_proxy(), + Server::try_new(tls_listener, service_fn(handler))?.with_tls(tls_config.clone()), + Server::try_new(tls_proxy_protocol_listener, service_fn(handler))? + .with_proxy() + .with_tls(tls_config.clone()), + ]; + + tracing::info!( + "Listening on http://127.0.0.1:3000, http(proxy)://127.0.0.1:3001, https://127.0.0.1:3002 and https(proxy)://127.0.0.1:3003" + ); + + let hard_shutdown = CancellationToken::new(); + let soft_shutdown = hard_shutdown.child_token(); + + { + let hard_shutdown = hard_shutdown.clone(); + let soft_shutdown = soft_shutdown.clone(); + tokio::spawn(async move { + tokio::signal::ctrl_c().await.unwrap(); + tracing::info!("Ctrl-C received, performing soft-shutdown"); + soft_shutdown.cancel(); + tokio::signal::ctrl_c().await.unwrap(); + tracing::info!("Ctrl-C received again, shutting down"); + hard_shutdown.cancel(); + }); + } + + mas_listener::server::run_servers(servers, hard_shutdown, soft_shutdown).await; + + Ok(()) +} + +fn load_tls_config() -> Result, anyhow::Error> { + let ca_cert = CertificateDer::pem_slice_iter(CA_CERT_PEM) + .collect::, _>>() + .context("Invalid CA certificate")?; + let mut ca_cert_store = RootCertStore::empty(); + ca_cert_store.add_parsable_certificates(ca_cert); + + let server_cert: Vec<_> = CertificateDer::pem_slice_iter(SERVER_CERT_PEM) + .collect::, _>>() + .context("Invalid server certificate")?; + + let server_key = + PrivatePkcs1KeyDer::from_pem_slice(SERVER_KEY_PEM).context("Invalid server TLS keys")?; + + let client_cert_verifier = WebPkiClientVerifier::builder(Arc::new(ca_cert_store)) + .allow_unauthenticated() + .build()?; + + let mut config = ServerConfig::builder() + .with_client_cert_verifier(client_cert_verifier) + 
.with_single_cert(server_cert, PrivateKeyDer::Pkcs1(server_key))?; + config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; + + Ok(Arc::new(config)) +} diff --git a/matrix-authentication-service/crates/listener/src/lib.rs b/matrix-authentication-service/crates/listener/src/lib.rs new file mode 100644 index 00000000..1618365b --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/lib.rs @@ -0,0 +1,49 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(rustdoc::missing_crate_level_docs)] +#![allow(clippy::module_name_repetitions)] + +//! An utility crate to build flexible [`hyper`] listeners, with optional TLS +//! and proxy protocol support. + +use self::{maybe_tls::TlsStreamInfo, proxy_protocol::ProxyProtocolV1Info}; + +pub mod maybe_tls; +pub mod proxy_protocol; +pub mod rewind; +pub mod server; +pub mod unix_or_tcp; + +#[derive(Debug, Clone)] +pub struct ConnectionInfo { + tls: Option, + proxy: Option, + net_peer_addr: Option, +} + +impl ConnectionInfo { + /// Returns informations about the TLS connection. Returns [`None`] if the + /// connection was not TLS. + #[must_use] + pub fn get_tls_ref(&self) -> Option<&TlsStreamInfo> { + self.tls.as_ref() + } + + /// Returns informations about the proxy protocol connection. Returns + /// [`None`] if the connection was not using the proxy protocol. + #[must_use] + pub fn get_proxy_ref(&self) -> Option<&ProxyProtocolV1Info> { + self.proxy.as_ref() + } + + /// Returns the remote peer address. Returns [`None`] if the connection was + /// established via a UNIX domain socket. 
+ #[must_use] + pub fn get_peer_addr(&self) -> Option { + self.net_peer_addr + } +} diff --git a/matrix-authentication-service/crates/listener/src/maybe_tls.rs b/matrix-authentication-service/crates/listener/src/maybe_tls.rs new file mode 100644 index 00000000..02fec894 --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/maybe_tls.rs @@ -0,0 +1,223 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{ + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; + +use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; +use tokio_rustls::{ + TlsAcceptor, + rustls::{ + ProtocolVersion, ServerConfig, ServerConnection, SupportedCipherSuite, + pki_types::CertificateDer, + }, +}; + +#[derive(Debug, Clone)] +#[non_exhaustive] +pub struct TlsStreamInfo { + pub protocol_version: ProtocolVersion, + pub negotiated_cipher_suite: SupportedCipherSuite, + pub sni_hostname: Option, + pub alpn_protocol: Option>, + pub peer_certificates: Option>>, +} + +impl TlsStreamInfo { + #[must_use] + pub fn is_alpn_h2(&self) -> bool { + matches!(self.alpn_protocol.as_deref(), Some(b"h2")) + } +} + +pin_project_lite::pin_project! { + #[project = MaybeTlsStreamProj] + pub enum MaybeTlsStream { + Secure { + #[pin] + stream: tokio_rustls::server::TlsStream + }, + Insecure { + #[pin] + stream: T, + }, + } +} + +impl MaybeTlsStream { + /// Get a reference to the underlying IO stream + /// + /// Returns [`None`] if the stream closed before the TLS handshake finished. + /// It is guaranteed to return [`Some`] value after the handshake finished, + /// or if it is a non-TLS connection. 
+ pub fn get_ref(&self) -> &T { + match self { + Self::Secure { stream } => stream.get_ref().0, + Self::Insecure { stream } => stream, + } + } + + /// Get a ref to the [`ServerConnection`] of the establish TLS stream. + /// + /// Returns [`None`] for non-TLS connections. + pub fn get_tls_connection(&self) -> Option<&ServerConnection> { + match self { + Self::Secure { stream } => Some(stream.get_ref().1), + Self::Insecure { .. } => None, + } + } + + /// Gather informations about the TLS connection. Returns `None` if the + /// stream is not a TLS stream. + /// + /// # Panics + /// + /// Panics if the TLS handshake is not done yet, which should never happen + pub fn tls_info(&self) -> Option { + let conn = self.get_tls_connection()?; + + // SAFETY: we're getting the protocol version and cipher suite *after* the + // handshake, so this should never lead to a panic + let protocol_version = conn + .protocol_version() + .expect("TLS handshake is not done yet"); + let negotiated_cipher_suite = conn + .negotiated_cipher_suite() + .expect("TLS handshake is not done yet"); + + let sni_hostname = conn.server_name().map(ToOwned::to_owned); + let alpn_protocol = conn.alpn_protocol().map(ToOwned::to_owned); + let peer_certificates = conn.peer_certificates().map(|certs| { + certs + .iter() + .cloned() + .map(CertificateDer::into_owned) + .collect() + }); + Some(TlsStreamInfo { + protocol_version, + negotiated_cipher_suite, + sni_hostname, + alpn_protocol, + peer_certificates, + }) + } +} + +impl AsyncRead for MaybeTlsStream +where + T: AsyncRead + AsyncWrite + Unpin, +{ + fn poll_read( + self: Pin<&mut Self>, + cx: &mut Context, + buf: &mut ReadBuf, + ) -> Poll> { + match self.project() { + MaybeTlsStreamProj::Secure { stream } => stream.poll_read(cx, buf), + MaybeTlsStreamProj::Insecure { stream } => stream.poll_read(cx, buf), + } + } +} + +impl AsyncWrite for MaybeTlsStream +where + T: AsyncRead + AsyncWrite + Unpin, +{ + fn poll_write( + self: Pin<&mut Self>, + cx: &mut 
Context<'_>, + buf: &[u8], + ) -> Poll> { + match self.project() { + MaybeTlsStreamProj::Secure { stream } => stream.poll_write(cx, buf), + MaybeTlsStreamProj::Insecure { stream } => stream.poll_write(cx, buf), + } + } + + fn poll_write_vectored( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> Poll> { + match self.project() { + MaybeTlsStreamProj::Secure { stream } => stream.poll_write_vectored(cx, bufs), + MaybeTlsStreamProj::Insecure { stream } => stream.poll_write_vectored(cx, bufs), + } + } + + fn is_write_vectored(&self) -> bool { + match self { + Self::Secure { stream } => stream.is_write_vectored(), + Self::Insecure { stream } => stream.is_write_vectored(), + } + } + + fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match self.project() { + MaybeTlsStreamProj::Secure { stream } => stream.poll_flush(cx), + MaybeTlsStreamProj::Insecure { stream } => stream.poll_flush(cx), + } + } + + fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match self.project() { + MaybeTlsStreamProj::Secure { stream } => stream.poll_shutdown(cx), + MaybeTlsStreamProj::Insecure { stream } => stream.poll_shutdown(cx), + } + } +} + +#[derive(Clone)] +pub struct MaybeTlsAcceptor { + tls_config: Option>, +} + +impl MaybeTlsAcceptor { + #[must_use] + pub fn new(tls_config: Option>) -> Self { + Self { tls_config } + } + + #[must_use] + pub fn new_secure(tls_config: Arc) -> Self { + Self { + tls_config: Some(tls_config), + } + } + + #[must_use] + pub fn new_insecure() -> Self { + Self { tls_config: None } + } + + #[must_use] + pub const fn is_secure(&self) -> bool { + self.tls_config.is_some() + } + + /// Accept a connection and do the TLS handshake + /// + /// # Errors + /// + /// Returns an error if the TLS handshake failed + pub async fn accept(&self, stream: T) -> Result, std::io::Error> + where + T: AsyncRead + AsyncWrite + Unpin, + { + match &self.tls_config { + Some(config) => { + let acceptor = 
TlsAcceptor::from(config.clone()); + let stream = acceptor.accept(stream).await?; + Ok(MaybeTlsStream::Secure { stream }) + } + None => Ok(MaybeTlsStream::Insecure { stream }), + } + } +} diff --git a/matrix-authentication-service/crates/listener/src/proxy_protocol/acceptor.rs b/matrix-authentication-service/crates/listener/src/proxy_protocol/acceptor.rs new file mode 100644 index 00000000..d62ab618 --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/proxy_protocol/acceptor.rs @@ -0,0 +1,60 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use bytes::BytesMut; +use thiserror::Error; +use tokio::io::{AsyncRead, AsyncReadExt}; + +use super::ProxyProtocolV1Info; +use crate::rewind::Rewind; + +#[derive(Clone, Copy, Debug, Default)] +pub struct ProxyAcceptor { + _private: (), +} + +#[derive(Debug, Error)] +#[error(transparent)] +pub enum ProxyAcceptError { + Parse(#[from] super::v1::ParseError), + Read(#[from] std::io::Error), +} + +impl ProxyAcceptor { + #[must_use] + pub const fn new() -> Self { + Self { _private: () } + } + + /// Accept a proxy-protocol stream + /// + /// # Errors + /// + /// Returns an error on read error on the underlying stream, or when the + /// proxy protocol preamble couldn't be parsed + pub async fn accept( + &self, + mut stream: T, + ) -> Result<(ProxyProtocolV1Info, Rewind), ProxyAcceptError> + where + T: AsyncRead + Unpin, + { + let mut buf = BytesMut::new(); + let info = loop { + stream.read_buf(&mut buf).await?; + + match ProxyProtocolV1Info::parse(&mut buf) { + Ok(info) => break info, + Err(e) if e.not_enough_bytes() => {} + Err(e) => return Err(e.into()), + } + }; + + let stream = Rewind::new_buffered(stream, buf.into()); + + Ok((info, stream)) + } +} diff --git 
a/matrix-authentication-service/crates/listener/src/proxy_protocol/maybe.rs b/matrix-authentication-service/crates/listener/src/proxy_protocol/maybe.rs new file mode 100644 index 00000000..47c4860e --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/proxy_protocol/maybe.rs @@ -0,0 +1,66 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use tokio::io::AsyncRead; + +use super::{ProxyAcceptor, ProxyProtocolV1Info, acceptor::ProxyAcceptError}; +use crate::rewind::Rewind; + +#[derive(Clone, Copy)] +pub struct MaybeProxyAcceptor { + acceptor: Option, +} + +impl MaybeProxyAcceptor { + #[must_use] + pub const fn new(proxied: bool) -> Self { + let acceptor = if proxied { + Some(ProxyAcceptor::new()) + } else { + None + }; + + Self { acceptor } + } + + #[must_use] + pub const fn new_proxied(acceptor: ProxyAcceptor) -> Self { + Self { + acceptor: Some(acceptor), + } + } + + #[must_use] + pub const fn new_unproxied() -> Self { + Self { acceptor: None } + } + + #[must_use] + pub const fn is_proxied(&self) -> bool { + self.acceptor.is_some() + } + + /// Accept a connection and do the proxy protocol handshake + /// + /// # Errors + /// + /// Returns an error if the proxy protocol handshake failed + pub async fn accept( + &self, + stream: T, + ) -> Result<(Option, Rewind), ProxyAcceptError> + where + T: AsyncRead + Unpin, + { + if let Some(acceptor) = self.acceptor { + let (info, stream) = acceptor.accept(stream).await?; + Ok((Some(info), stream)) + } else { + let stream = Rewind::new(stream); + Ok((None, stream)) + } + } +} diff --git a/matrix-authentication-service/crates/listener/src/proxy_protocol/mod.rs b/matrix-authentication-service/crates/listener/src/proxy_protocol/mod.rs new file mode 100644 index 00000000..5f9929fe --- /dev/null +++ 
b/matrix-authentication-service/crates/listener/src/proxy_protocol/mod.rs @@ -0,0 +1,15 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod acceptor; +mod maybe; +mod v1; + +pub use self::{ + acceptor::{ProxyAcceptError, ProxyAcceptor}, + maybe::MaybeProxyAcceptor, + v1::ProxyProtocolV1Info, +}; diff --git a/matrix-authentication-service/crates/listener/src/proxy_protocol/v1.rs b/matrix-authentication-service/crates/listener/src/proxy_protocol/v1.rs new file mode 100644 index 00000000..bb78aa65 --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/proxy_protocol/v1.rs @@ -0,0 +1,296 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{ + net::{AddrParseError, Ipv4Addr, Ipv6Addr, SocketAddr}, + num::ParseIntError, + str::Utf8Error, +}; + +use bytes::Buf; +use thiserror::Error; + +#[derive(Debug, Clone)] +pub enum ProxyProtocolV1Info { + Tcp { + source: SocketAddr, + destination: SocketAddr, + }, + Udp { + source: SocketAddr, + destination: SocketAddr, + }, + Unknown, +} + +#[derive(Error, Debug)] +#[error("Invalid proxy protocol header")] +pub enum ParseError { + #[error("Not enough bytes provided")] + NotEnoughBytes, + NoCrLf, + NoProxyPreamble, + NoProtocol, + InvalidProtocol, + NoSourceAddress, + NoDestinationAddress, + NoSourcePort, + NoDestinationPort, + TooManyFields, + InvalidUtf8(#[from] Utf8Error), + InvalidAddress(#[from] AddrParseError), + InvalidPort(#[from] ParseIntError), +} + +impl ParseError { + pub const fn not_enough_bytes(&self) -> bool { + matches!(self, &Self::NotEnoughBytes) + } +} + +impl ProxyProtocolV1Info { + pub(super) fn parse(buf: &mut B) -> Result + where + B: Buf + AsRef<[u8]>, + { + use ParseError as E; + // First, check if we *possibly* have enough bytes. 
+ // Minimum is 15: "PROXY UNKNOWN\r\n" + + if buf.remaining() < 15 { + return Err(E::NotEnoughBytes); + } + + // Let's check in the first 108 bytes if we find a CRLF + let Some(crlf) = buf + .as_ref() + .windows(2) + .take(108) + .position(|needle| needle == [0x0D, 0x0A]) + else { + // If not, it might be because we don't have enough bytes + return if buf.remaining() < 108 { + Err(E::NotEnoughBytes) + } else { + // Else it's just invalid + Err(E::NoCrLf) + }; + }; + + // Trim to everything before the CRLF + let bytes = &buf.as_ref()[..crlf]; + + let mut it = bytes.splitn(6, |c| c == &b' '); + // Check for the preamble + if it.next() != Some(b"PROXY") { + return Err(E::NoProxyPreamble); + } + + let result = match it.next() { + Some(b"TCP4") => { + let source_address: Ipv4Addr = + std::str::from_utf8(it.next().ok_or(E::NoSourceAddress)?)?.parse()?; + let destination_address: Ipv4Addr = + std::str::from_utf8(it.next().ok_or(E::NoDestinationAddress)?)?.parse()?; + let source_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoSourcePort)?)?.parse()?; + let destination_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoDestinationPort)?)?.parse()?; + if it.next().is_some() { + return Err(E::TooManyFields); + } + + let source = (source_address, source_port).into(); + let destination = (destination_address, destination_port).into(); + + Self::Tcp { + source, + destination, + } + } + Some(b"TCP6") => { + let source_address: Ipv6Addr = + std::str::from_utf8(it.next().ok_or(E::NoSourceAddress)?)?.parse()?; + let destination_address: Ipv6Addr = + std::str::from_utf8(it.next().ok_or(E::NoDestinationAddress)?)?.parse()?; + let source_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoSourcePort)?)?.parse()?; + let destination_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoDestinationPort)?)?.parse()?; + if it.next().is_some() { + return Err(E::TooManyFields); + } + + let source = (source_address, source_port).into(); + let destination = (destination_address, 
destination_port).into(); + + Self::Tcp { + source, + destination, + } + } + Some(b"UDP4") => { + let source_address: Ipv4Addr = + std::str::from_utf8(it.next().ok_or(E::NoSourceAddress)?)?.parse()?; + let destination_address: Ipv4Addr = + std::str::from_utf8(it.next().ok_or(E::NoDestinationAddress)?)?.parse()?; + let source_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoSourcePort)?)?.parse()?; + let destination_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoDestinationPort)?)?.parse()?; + if it.next().is_some() { + return Err(E::TooManyFields); + } + + let source = (source_address, source_port).into(); + let destination = (destination_address, destination_port).into(); + + Self::Udp { + source, + destination, + } + } + Some(b"UDP6") => { + let source_address: Ipv6Addr = + std::str::from_utf8(it.next().ok_or(E::NoSourceAddress)?)?.parse()?; + let destination_address: Ipv6Addr = + std::str::from_utf8(it.next().ok_or(E::NoDestinationAddress)?)?.parse()?; + let source_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoSourcePort)?)?.parse()?; + let destination_port: u16 = + std::str::from_utf8(it.next().ok_or(E::NoDestinationPort)?)?.parse()?; + if it.next().is_some() { + return Err(E::TooManyFields); + } + + let source = (source_address, source_port).into(); + let destination = (destination_address, destination_port).into(); + + Self::Udp { + source, + destination, + } + } + Some(b"UNKNOWN") => Self::Unknown, + Some(_) => return Err(E::InvalidProtocol), + None => return Err(E::NoProtocol), + }; + + buf.advance(crlf + 2); + + Ok(result) + } + + #[must_use] + pub fn is_ipv4(&self) -> bool { + match self { + Self::Udp { + source, + destination, + } + | Self::Tcp { + source, + destination, + } => source.is_ipv4() && destination.is_ipv4(), + Self::Unknown => false, + } + } + + #[must_use] + pub fn is_ipv6(&self) -> bool { + match self { + Self::Udp { + source, + destination, + } + | Self::Tcp { + source, + destination, + } => source.is_ipv6() && 
destination.is_ipv6(), + Self::Unknown => false, + } + } + + #[must_use] + pub const fn is_tcp(&self) -> bool { + matches!(self, Self::Tcp { .. }) + } + + #[must_use] + pub const fn is_udp(&self) -> bool { + matches!(self, Self::Udp { .. }) + } + + #[must_use] + pub const fn is_unknown(&self) -> bool { + matches!(self, Self::Unknown) + } + + #[must_use] + pub const fn source(&self) -> Option<&SocketAddr> { + match self { + Self::Udp { source, .. } | Self::Tcp { source, .. } => Some(source), + Self::Unknown => None, + } + } + + #[must_use] + pub const fn destination(&self) -> Option<&SocketAddr> { + match self { + Self::Udp { destination, .. } | Self::Tcp { destination, .. } => Some(destination), + Self::Unknown => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse() { + let mut buf = + b"PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535\r\nhello world".as_slice(); + let info = ProxyProtocolV1Info::parse(&mut buf).unwrap(); + assert_eq!(buf, b"hello world"); + assert!(info.is_tcp()); + assert!(!info.is_udp()); + assert!(!info.is_unknown()); + assert!(info.is_ipv4()); + assert!(!info.is_ipv6()); + + let mut buf = + b"PROXY TCP6 ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\nhello world" + .as_slice(); + let info = ProxyProtocolV1Info::parse(&mut buf).unwrap(); + assert_eq!(buf, b"hello world"); + assert!(info.is_tcp()); + assert!(!info.is_udp()); + assert!(!info.is_unknown()); + assert!(!info.is_ipv4()); + assert!(info.is_ipv6()); + + let mut buf = b"PROXY UNKNOWN\r\nhello world".as_slice(); + let info = ProxyProtocolV1Info::parse(&mut buf).unwrap(); + assert_eq!(buf, b"hello world"); + assert!(!info.is_tcp()); + assert!(!info.is_udp()); + assert!(info.is_unknown()); + assert!(!info.is_ipv4()); + assert!(!info.is_ipv6()); + + let mut buf = + b"PROXY UNKNOWN ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\nhello world" + 
.as_slice(); + let info = ProxyProtocolV1Info::parse(&mut buf).unwrap(); + assert_eq!(buf, b"hello world"); + assert!(!info.is_tcp()); + assert!(!info.is_udp()); + assert!(info.is_unknown()); + assert!(!info.is_ipv4()); + assert!(!info.is_ipv6()); + } +} diff --git a/matrix-authentication-service/crates/listener/src/rewind.rs b/matrix-authentication-service/crates/listener/src/rewind.rs new file mode 100644 index 00000000..98090b93 --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/rewind.rs @@ -0,0 +1,151 @@ +// Taken from hyper@0.14.20, src/common/io/rewind.rs + +use std::{ + cmp, io, + marker::Unpin, + pin::Pin, + task::{Context, Poll}, +}; + +use bytes::{Buf, Bytes}; +use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; + +/// Combine a buffer with an IO, rewinding reads to use the buffer. +#[derive(Debug)] +pub struct Rewind { + pre: Option, + inner: T, +} + +impl Rewind { + pub(crate) fn new(io: T) -> Self { + Rewind { + pre: None, + inner: io, + } + } + + pub(crate) fn new_buffered(io: T, buf: Bytes) -> Self { + Rewind { + pre: Some(buf), + inner: io, + } + } + + #[cfg(test)] + pub(crate) fn rewind(&mut self, bs: Bytes) { + debug_assert!(self.pre.is_none()); + self.pre = Some(bs); + } +} + +impl AsyncRead for Rewind +where + T: AsyncRead + Unpin, +{ + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + if let Some(mut prefix) = self.pre.take() { + // If there are no remaining bytes, let the bytes get dropped. + if !prefix.is_empty() { + let copy_len = cmp::min(prefix.len(), buf.remaining()); + // TODO: There should be a way to do following two lines cleaner... 
+ buf.put_slice(&prefix[..copy_len]); + prefix.advance(copy_len); + // Put back what's left + if !prefix.is_empty() { + self.pre = Some(prefix); + } + + return Poll::Ready(Ok(())); + } + } + Pin::new(&mut self.inner).poll_read(cx, buf) + } +} + +impl AsyncWrite for Rewind +where + T: AsyncWrite + Unpin, +{ + fn poll_write( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + Pin::new(&mut self.inner).poll_write(cx, buf) + } + + fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + Pin::new(&mut self.inner).poll_flush(cx) + } + + fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + Pin::new(&mut self.inner).poll_shutdown(cx) + } + + fn poll_write_vectored( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + bufs: &[io::IoSlice<'_>], + ) -> Poll> { + Pin::new(&mut self.inner).poll_write_vectored(cx, bufs) + } + + fn is_write_vectored(&self) -> bool { + self.inner.is_write_vectored() + } +} + +#[cfg(test)] +mod tests { + // FIXME: re-implement tests with `async/await`, this import should + // trigger a warning to remind us + use bytes::Bytes; + use tokio::io::AsyncReadExt; + + use super::Rewind; + + #[tokio::test] + async fn partial_rewind() { + let underlying = [104, 101, 108, 108, 111]; + + let mock = tokio_test::io::Builder::new().read(&underlying).build(); + + let mut stream = Rewind::new(mock); + + // Read off some bytes, ensure we filled o1 + let mut buf = [0; 2]; + stream.read_exact(&mut buf).await.expect("read1"); + + // Rewind the stream so that it is as if we never read in the first place. 
+ stream.rewind(Bytes::copy_from_slice(&buf[..])); + + let mut buf = [0; 5]; + stream.read_exact(&mut buf).await.expect("read1"); + + // At this point we should have read everything that was in the MockStream + assert_eq!(&buf, &underlying); + } + + #[tokio::test] + async fn full_rewind() { + let underlying = [104, 101, 108, 108, 111]; + + let mock = tokio_test::io::Builder::new().read(&underlying).build(); + + let mut stream = Rewind::new(mock); + + let mut buf = [0; 5]; + stream.read_exact(&mut buf).await.expect("read1"); + + // Rewind the stream so that it is as if we never read in the first place. + stream.rewind(Bytes::copy_from_slice(&buf[..])); + + let mut buf = [0; 5]; + stream.read_exact(&mut buf).await.expect("read1"); + } +} diff --git a/matrix-authentication-service/crates/listener/src/server.rs b/matrix-authentication-service/crates/listener/src/server.rs new file mode 100644 index 00000000..2a0b6ccd --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/server.rs @@ -0,0 +1,455 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{ + pin::Pin, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; + +use futures_util::{StreamExt, stream::SelectAll}; +use hyper::{Request, Response}; +use hyper_util::{ + rt::{TokioExecutor, TokioIo}, + server::conn::auto::Connection, + service::TowerToHyperService, +}; +use mas_context::LogContext; +use pin_project_lite::pin_project; +use thiserror::Error; +use tokio_rustls::rustls::ServerConfig; +use tokio_util::sync::{CancellationToken, WaitForCancellationFutureOwned}; +use tower::Service; +use tower_http::add_extension::AddExtension; +use tracing::Instrument; + +use crate::{ + ConnectionInfo, + maybe_tls::{MaybeTlsAcceptor, MaybeTlsStream, TlsStreamInfo}, + proxy_protocol::{MaybeProxyAcceptor, ProxyAcceptError}, + rewind::Rewind, + unix_or_tcp::{SocketAddr, UnixOrTcpConnection, UnixOrTcpListener}, +}; + +/// The timeout for the handshake to complete +const HANDSHAKE_TIMEOUT: Duration = Duration::from_secs(5); + +pub struct Server { + tls: Option>, + proxy: bool, + listener: UnixOrTcpListener, + service: S, +} + +impl Server { + /// # Errors + /// + /// Returns an error if the listener couldn't be converted via [`TryInto`] + pub fn try_new(listener: L, service: S) -> Result + where + L: TryInto, + { + Ok(Self { + tls: None, + proxy: false, + listener: listener.try_into()?, + service, + }) + } + + #[must_use] + pub fn new(listener: impl Into, service: S) -> Self { + Self { + tls: None, + proxy: false, + listener: listener.into(), + service, + } + } + + #[must_use] + pub const fn with_proxy(mut self) -> Self { + self.proxy = true; + self + } + + #[must_use] + pub fn with_tls(mut self, config: Arc) -> Self { + self.tls = Some(config); + self + } + + /// Run a single server + pub async fn run( + self, + soft_shutdown_token: CancellationToken, + hard_shutdown_token: CancellationToken, + ) where + S: Service, Response = Response> + Clone + Send + 'static, + S::Future: Send + 'static, + S::Error: std::error::Error + Send + Sync + 'static, + B: 
http_body::Body + Send + 'static, + B::Data: Send, + B::Error: std::error::Error + Send + Sync + 'static, + { + run_servers( + std::iter::once(self), + soft_shutdown_token, + hard_shutdown_token, + ) + .await; + } +} + +#[derive(Debug, Error)] +#[non_exhaustive] +enum AcceptError { + #[error("failed to complete the TLS handshake")] + TlsHandshake { + #[source] + source: std::io::Error, + }, + + #[error("failed to complete the proxy protocol handshake")] + ProxyHandshake { + #[source] + source: ProxyAcceptError, + }, + + #[error("connection handshake timed out")] + HandshakeTimeout { + #[source] + source: tokio::time::error::Elapsed, + }, +} + +impl AcceptError { + fn tls_handshake(source: std::io::Error) -> Self { + Self::TlsHandshake { source } + } + + fn proxy_handshake(source: ProxyAcceptError) -> Self { + Self::ProxyHandshake { source } + } + + fn handshake_timeout(source: tokio::time::error::Elapsed) -> Self { + Self::HandshakeTimeout { source } + } +} + +/// Accept a connection and do the proxy protocol and TLS handshake +/// +/// Returns an error if the proxy protocol or TLS handshake failed. +/// Returns the connection, which should be used to spawn a task to serve the +/// connection. 
+#[allow(clippy::type_complexity)] +#[tracing::instrument( + name = "accept", + skip_all, + fields( + network.protocol.name = "http", + network.peer.address, + network.peer.port, + ), +)] +async fn accept( + maybe_proxy_acceptor: &MaybeProxyAcceptor, + maybe_tls_acceptor: &MaybeTlsAcceptor, + peer_addr: SocketAddr, + stream: UnixOrTcpConnection, + service: S, +) -> Result< + Connection< + 'static, + TokioIo>>, + TowerToHyperService>, + TokioExecutor, + >, + AcceptError, +> +where + S: Service, Response = Response> + Send + Clone + 'static, + S::Error: std::error::Error + Send + Sync + 'static, + S::Future: Send + 'static, + B: http_body::Body + Send + 'static, + B::Data: Send, + B::Error: std::error::Error + Send + Sync + 'static, +{ + let span = tracing::Span::current(); + + match peer_addr { + SocketAddr::Net(addr) => { + span.record("network.peer.address", tracing::field::display(addr.ip())); + span.record("network.peer.port", addr.port()); + } + SocketAddr::Unix(ref addr) => { + span.record("network.peer.address", tracing::field::debug(addr)); + } + } + + // Wrap the connection acceptation logic in a timeout + tokio::time::timeout(HANDSHAKE_TIMEOUT, async move { + let (proxy, stream) = maybe_proxy_acceptor + .accept(stream) + .await + .map_err(AcceptError::proxy_handshake)?; + + let stream = maybe_tls_acceptor + .accept(stream) + .await + .map_err(AcceptError::tls_handshake)?; + + let tls = stream.tls_info(); + + // Figure out if it's HTTP/2 based on the negociated ALPN info + let is_h2 = tls.as_ref().is_some_and(TlsStreamInfo::is_alpn_h2); + + let info = ConnectionInfo { + tls, + proxy, + net_peer_addr: peer_addr.into_net(), + }; + + let mut builder = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()); + if is_h2 { + builder = builder.http2_only(); + } + builder.http1().keep_alive(true); + + let service = TowerToHyperService::new(AddExtension::new(service, info)); + + let conn = builder + .serve_connection(TokioIo::new(stream), service) + 
.into_owned(); + + Ok(conn) + }) + .instrument(span) + .await + .map_err(AcceptError::handshake_timeout)? +} + +pin_project! { + /// A wrapper around a connection that can be aborted when a shutdown signal is received. + /// + /// This works by sharing an atomic boolean between all connections, and when a shutdown + /// signal is received, the boolean is set to true. The connection will then check the + /// boolean before polling the underlying connection, and if it's true, it will start a + /// graceful shutdown. + /// + /// We also use an event listener to wake up the connection when the shutdown signal is + /// received, because the connection needs to be polled again to start the graceful shutdown. + struct AbortableConnection { + #[pin] + connection: C, + #[pin] + cancellation_future: WaitForCancellationFutureOwned, + did_start_shutdown: bool, + } +} + +impl AbortableConnection { + fn new(connection: C, cancellation_token: CancellationToken) -> Self { + Self { + connection, + cancellation_future: cancellation_token.cancelled_owned(), + did_start_shutdown: false, + } + } +} + +impl Future + for AbortableConnection, TokioExecutor>> +where + Connection<'static, T, TowerToHyperService, TokioExecutor>: Future, + S: Service, Response = Response> + Send + Clone + 'static, + S::Future: Send + 'static, + S::Error: std::error::Error + Send + Sync, + T: hyper::rt::Read + hyper::rt::Write + Unpin, + B: http_body::Body + Send + 'static, + B::Data: Send, + B::Error: std::error::Error + Send + Sync + 'static, +{ + type Output = , TokioExecutor> as Future>::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let mut this = self.project(); + + if let Poll::Ready(()) = this.cancellation_future.poll(cx) + && !*this.did_start_shutdown + { + *this.did_start_shutdown = true; + this.connection.as_mut().graceful_shutdown(); + } + + this.connection.poll(cx) + } +} + +pub async fn run_servers( + listeners: impl IntoIterator>, + soft_shutdown_token: 
CancellationToken, + hard_shutdown_token: CancellationToken, +) where + S: Service, Response = Response> + Clone + Send + 'static, + S::Future: Send + 'static, + S::Error: std::error::Error + Send + Sync + 'static, + B: http_body::Body + Send + 'static, + B::Data: Send, + B::Error: std::error::Error + Send + Sync + 'static, +{ + // This guard on the shutdown token is to ensure that if this task crashes for + // any reason, the server will shut down + let _guard = soft_shutdown_token.clone().drop_guard(); + + // Create a stream of accepted connections out of the listeners + let mut accept_stream: SelectAll<_> = listeners + .into_iter() + .map(|server| { + let maybe_proxy_acceptor = MaybeProxyAcceptor::new(server.proxy); + let maybe_tls_acceptor = MaybeTlsAcceptor::new(server.tls); + futures_util::stream::poll_fn(move |cx| { + let res = + std::task::ready!(server.listener.poll_accept(cx)).map(|(addr, stream)| { + ( + maybe_proxy_acceptor, + maybe_tls_acceptor.clone(), + server.service.clone(), + addr, + stream, + ) + }); + Poll::Ready(Some(res)) + }) + }) + .collect(); + + // A JoinSet which collects connections that are being accepted + let mut accept_tasks = tokio::task::JoinSet::new(); + // A JoinSet which collects connections that are being served + let mut connection_tasks = tokio::task::JoinSet::new(); + + loop { + tokio::select! 
{ + biased; + + // First look for the shutdown signal + () = soft_shutdown_token.cancelled() => { + tracing::debug!("Shutting down listeners"); + break; + }, + + // Poll on the JoinSet to collect connections to serve + res = accept_tasks.join_next(), if !accept_tasks.is_empty() => { + match res { + Some(Ok(Some(connection))) => { + let token = soft_shutdown_token.child_token(); + connection_tasks.spawn(LogContext::new("http-serve").run(async move || { + tracing::debug!("Accepted connection"); + if let Err(e) = AbortableConnection::new(connection, token).await { + tracing::warn!(error = &*e as &dyn std::error::Error, "Failed to serve connection"); + } + })); + }, + Some(Ok(None)) => { /* Connection did not finish handshake, error should be logged in `accept` */ }, + Some(Err(e)) => tracing::error!(error = &e as &dyn std::error::Error, "Join error"), + None => tracing::error!("Join set was polled even though it was empty"), + } + }, + + // Poll on the JoinSet to collect finished connections + res = connection_tasks.join_next(), if !connection_tasks.is_empty() => { + match res { + Some(Ok(())) => { /* Connection finished, any errors should be logged in in the spawned task */ }, + Some(Err(e)) => tracing::error!(error = &e as &dyn std::error::Error, "Join error"), + None => tracing::error!("Join set was polled even though it was empty"), + } + }, + + // Look for connections to accept + res = accept_stream.next() => { + let Some(res) = res else { continue }; + + // Spawn the connection in the set, so we don't have to wait for the handshake to + // accept the next connection. 
This allows us to keep track of active connections + // and waiting on them for a graceful shutdown + accept_tasks.spawn(LogContext::new("http-accept").run(async move || { + let (maybe_proxy_acceptor, maybe_tls_acceptor, service, peer_addr, stream) = match res { + Ok(res) => res, + Err(e) => { + tracing::warn!(error = &e as &dyn std::error::Error, "Failed to accept connection from the underlying socket"); + return None; + } + }; + + match accept(&maybe_proxy_acceptor, &maybe_tls_acceptor, peer_addr, stream, service).await { + Ok(connection) => Some(connection), + Err(e) => { + tracing::warn!(error = &e as &dyn std::error::Error, "Failed to accept connection"); + None + } + } + })); + }, + }; + } + + // Wait for connections to cleanup + if !accept_tasks.is_empty() || !connection_tasks.is_empty() { + tracing::info!( + "There are {active} active connections ({pending} pending), performing a graceful shutdown. Send the shutdown signal again to force.", + active = connection_tasks.len(), + pending = accept_tasks.len(), + ); + + while !accept_tasks.is_empty() || !connection_tasks.is_empty() { + tokio::select! 
{ + biased; + + // Poll on the JoinSet to collect connections to serve + res = accept_tasks.join_next(), if !accept_tasks.is_empty() => { + match res { + Some(Ok(Some(connection))) => { + let token = soft_shutdown_token.child_token(); + connection_tasks.spawn(LogContext::new("http-serve").run(async || { + tracing::debug!("Accepted connection"); + if let Err(e) = AbortableConnection::new(connection, token).await { + tracing::warn!(error = &*e as &dyn std::error::Error, "Failed to serve connection"); + } + })); + } + Some(Ok(None)) => { /* Connection did not finish handshake, error should be logged in `accept` */ }, + Some(Err(e)) => tracing::error!(error = &e as &dyn std::error::Error, "Join error"), + None => tracing::error!("Join set was polled even though it was empty"), + } + }, + + // Poll on the JoinSet to collect finished connections + res = connection_tasks.join_next(), if !connection_tasks.is_empty() => { + match res { + Some(Ok(())) => { /* Connection finished, any errors should be logged in in the spawned task */ }, + Some(Err(e)) => tracing::error!(error = &e as &dyn std::error::Error, "Join error"), + None => tracing::error!("Join set was polled even though it was empty"), + } + }, + + // Handle when we are asked to hard shutdown + () = hard_shutdown_token.cancelled() => { + tracing::warn!( + "Forcing shutdown ({active} active connections, {pending} pending connections)", + active = connection_tasks.len(), + pending = accept_tasks.len(), + ); + break; + }, + } + } + } + + accept_tasks.shutdown().await; + connection_tasks.shutdown().await; +} diff --git a/matrix-authentication-service/crates/listener/src/unix_or_tcp.rs b/matrix-authentication-service/crates/listener/src/unix_or_tcp.rs new file mode 100644 index 00000000..5cd85c44 --- /dev/null +++ b/matrix-authentication-service/crates/listener/src/unix_or_tcp.rs @@ -0,0 +1,320 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A listener which can listen on either TCP sockets or on UNIX domain sockets + +// TODO: Unlink the UNIX socket on drop? + +use std::{ + pin::Pin, + task::{Context, Poll, ready}, +}; + +use tokio::{ + io::{AsyncRead, AsyncWrite}, + net::{TcpListener, TcpStream, UnixListener, UnixStream}, +}; + +pub enum SocketAddr { + Unix(tokio::net::unix::SocketAddr), + Net(std::net::SocketAddr), +} + +impl From for SocketAddr { + fn from(value: tokio::net::unix::SocketAddr) -> Self { + Self::Unix(value) + } +} + +impl From for SocketAddr { + fn from(value: std::net::SocketAddr) -> Self { + Self::Net(value) + } +} + +impl std::fmt::Debug for SocketAddr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Unix(l) => std::fmt::Debug::fmt(l, f), + Self::Net(l) => std::fmt::Debug::fmt(l, f), + } + } +} + +impl SocketAddr { + #[must_use] + pub fn into_net(self) -> Option { + match self { + Self::Net(socket) => Some(socket), + Self::Unix(_) => None, + } + } + + #[must_use] + pub fn into_unix(self) -> Option { + match self { + Self::Net(_) => None, + Self::Unix(socket) => Some(socket), + } + } + + #[must_use] + pub const fn as_net(&self) -> Option<&std::net::SocketAddr> { + match self { + Self::Net(socket) => Some(socket), + Self::Unix(_) => None, + } + } + + #[must_use] + pub const fn as_unix(&self) -> Option<&tokio::net::unix::SocketAddr> { + match self { + Self::Net(_) => None, + Self::Unix(socket) => Some(socket), + } + } +} + +pub enum UnixOrTcpListener { + Unix(UnixListener), + Tcp(TcpListener), +} + +impl From for UnixOrTcpListener { + fn from(listener: UnixListener) -> Self { + Self::Unix(listener) + } +} + +impl From for UnixOrTcpListener { + fn from(listener: TcpListener) -> Self { + Self::Tcp(listener) + } +} + +impl TryFrom for UnixOrTcpListener { + type Error = std::io::Error; + 
+ fn try_from(listener: std::os::unix::net::UnixListener) -> Result { + listener.set_nonblocking(true)?; + Ok(Self::Unix(UnixListener::from_std(listener)?)) + } +} + +impl TryFrom for UnixOrTcpListener { + type Error = std::io::Error; + + fn try_from(listener: std::net::TcpListener) -> Result { + listener.set_nonblocking(true)?; + Ok(Self::Tcp(TcpListener::from_std(listener)?)) + } +} + +impl UnixOrTcpListener { + /// Get the local address of the listener + /// + /// # Errors + /// + /// Returns an error on rare cases where the underlying [`TcpListener`] or + /// [`UnixListener`] couldn't provide the local address + pub fn local_addr(&self) -> Result { + match self { + Self::Unix(listener) => listener.local_addr().map(SocketAddr::from), + Self::Tcp(listener) => listener.local_addr().map(SocketAddr::from), + } + } + + pub const fn is_unix(&self) -> bool { + matches!(self, Self::Unix(_)) + } + + pub const fn is_tcp(&self) -> bool { + matches!(self, Self::Tcp(_)) + } + + /// Accept an incoming connection + /// + /// # Cancel safety + /// + /// This function is safe to cancel, as both [`UnixListener::accept`] and + /// [`TcpListener::accept`] are safe to cancel. 
+ /// + /// # Errors + /// + /// Returns an error if the underlying socket couldn't accept the connection + pub async fn accept(&self) -> Result<(SocketAddr, UnixOrTcpConnection), std::io::Error> { + match self { + Self::Unix(listener) => { + let (stream, remote_addr) = listener.accept().await?; + + let socket = socket2::SockRef::from(&stream); + socket.set_keepalive(true)?; + + Ok((remote_addr.into(), UnixOrTcpConnection::Unix { stream })) + } + Self::Tcp(listener) => { + let (stream, remote_addr) = listener.accept().await?; + + let socket = socket2::SockRef::from(&stream); + socket.set_keepalive(true)?; + socket.set_tcp_nodelay(true)?; + + Ok((remote_addr.into(), UnixOrTcpConnection::Tcp { stream })) + } + } + } + + /// Poll for an incoming connection + /// + /// # Cancel safety + /// + /// This function is safe to cancel, as both [`UnixListener::poll_accept`] + /// and [`TcpListener::poll_accept`] are safe to cancel. + /// + /// # Errors + /// + /// Returns an error if the underlying socket couldn't accept the connection + pub fn poll_accept( + &self, + cx: &mut Context<'_>, + ) -> Poll> { + match self { + Self::Unix(listener) => { + let (stream, remote_addr) = ready!(listener.poll_accept(cx)?); + + let socket = socket2::SockRef::from(&stream); + socket.set_keepalive(true)?; + + Poll::Ready(Ok(( + remote_addr.into(), + UnixOrTcpConnection::Unix { stream }, + ))) + } + Self::Tcp(listener) => { + let (stream, remote_addr) = ready!(listener.poll_accept(cx)?); + + let socket = socket2::SockRef::from(&stream); + socket.set_keepalive(true)?; + socket.set_tcp_nodelay(true)?; + + Poll::Ready(Ok(( + remote_addr.into(), + UnixOrTcpConnection::Tcp { stream }, + ))) + } + } + } +} + +pin_project_lite::pin_project! 
{ + #[project = UnixOrTcpConnectionProj] + pub enum UnixOrTcpConnection { + Unix { + #[pin] + stream: UnixStream, + }, + + Tcp { + #[pin] + stream: TcpStream, + }, + } +} + +impl From for UnixOrTcpConnection { + fn from(stream: TcpStream) -> Self { + Self::Tcp { stream } + } +} + +impl UnixOrTcpConnection { + /// Get the local address of the stream + /// + /// # Errors + /// + /// Returns an error on rare cases where the underlying [`TcpStream`] or + /// [`UnixStream`] couldn't provide the local address + pub fn local_addr(&self) -> Result { + match self { + Self::Unix { stream } => stream.local_addr().map(SocketAddr::from), + Self::Tcp { stream } => stream.local_addr().map(SocketAddr::from), + } + } + + /// Get the remote address of the stream + /// + /// # Errors + /// + /// Returns an error on rare cases where the underlying [`TcpStream`] or + /// [`UnixStream`] couldn't provide the remote address + pub fn peer_addr(&self) -> Result { + match self { + Self::Unix { stream } => stream.peer_addr().map(SocketAddr::from), + Self::Tcp { stream } => stream.peer_addr().map(SocketAddr::from), + } + } +} + +impl AsyncRead for UnixOrTcpConnection { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &mut tokio::io::ReadBuf<'_>, + ) -> Poll> { + match self.project() { + UnixOrTcpConnectionProj::Unix { stream } => stream.poll_read(cx, buf), + UnixOrTcpConnectionProj::Tcp { stream } => stream.poll_read(cx, buf), + } + } +} + +impl AsyncWrite for UnixOrTcpConnection { + fn poll_write( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &[u8], + ) -> Poll> { + match self.project() { + UnixOrTcpConnectionProj::Unix { stream } => stream.poll_write(cx, buf), + UnixOrTcpConnectionProj::Tcp { stream } => stream.poll_write(cx, buf), + } + } + + fn poll_write_vectored( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> Poll> { + match self.project() { + UnixOrTcpConnectionProj::Unix { stream } => stream.poll_write_vectored(cx, 
bufs), + UnixOrTcpConnectionProj::Tcp { stream } => stream.poll_write_vectored(cx, bufs), + } + } + + fn is_write_vectored(&self) -> bool { + match self { + UnixOrTcpConnection::Unix { stream } => stream.is_write_vectored(), + UnixOrTcpConnection::Tcp { stream } => stream.is_write_vectored(), + } + } + + fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + match self.project() { + UnixOrTcpConnectionProj::Unix { stream } => stream.poll_flush(cx), + UnixOrTcpConnectionProj::Tcp { stream } => stream.poll_flush(cx), + } + } + + fn poll_shutdown( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + match self.project() { + UnixOrTcpConnectionProj::Unix { stream } => stream.poll_shutdown(cx), + UnixOrTcpConnectionProj::Tcp { stream } => stream.poll_shutdown(cx), + } + } +} diff --git a/matrix-authentication-service/crates/matrix-synapse/Cargo.toml b/matrix-authentication-service/crates/matrix-synapse/Cargo.toml new file mode 100644 index 00000000..34c08e9c --- /dev/null +++ b/matrix-authentication-service/crates/matrix-synapse/Cargo.toml @@ -0,0 +1,31 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-matrix-synapse" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +http.workspace = true +reqwest.workspace = true +serde.workspace = true +thiserror.workspace = true +tracing.workspace = true +url.workspace = true +urlencoding.workspace = true + +mas-http.workspace = true +mas-matrix.workspace = true diff --git a/matrix-authentication-service/crates/matrix-synapse/src/error.rs b/matrix-authentication-service/crates/matrix-synapse/src/error.rs new file mode 100644 index 00000000..c1d98ccd --- /dev/null +++ b/matrix-authentication-service/crates/matrix-synapse/src/error.rs @@ -0,0 +1,79 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::fmt::Display; + +use async_trait::async_trait; +use serde::Deserialize; +use thiserror::Error; + +/// Encountered when trying to register a user ID which has been taken. +/// — +pub(crate) const M_USER_IN_USE: &str = "M_USER_IN_USE"; +/// Encountered when trying to register a user ID which is not valid. +/// — +pub(crate) const M_INVALID_USERNAME: &str = "M_INVALID_USERNAME"; +/// Encountered when trying to register a user ID reserved by an appservice. +/// — +pub(crate) const M_EXCLUSIVE: &str = "M_EXCLUSIVE"; + +/// Represents a Matrix error +/// Ref: +#[derive(Debug, Deserialize)] +struct MatrixError { + errcode: String, + error: String, +} + +/// Represents an error received from the homeserver. +/// Where possible, we capture the Matrix error from the JSON response body. 
+#[derive(Debug, Error)] +pub(crate) struct Error { + synapse_error: Option, + + #[source] + source: reqwest::Error, +} + +impl Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(matrix_error) = &self.synapse_error { + write!(f, "{}: {}", matrix_error.errcode, matrix_error.error) + } else { + write!(f, "(no specific error)") + } + } +} + +impl Error { + /// Return the error code (`errcode`) + pub fn errcode(&self) -> Option<&str> { + let me = self.synapse_error.as_ref()?; + Some(&me.errcode) + } +} + +/// An extension trait for [`reqwest::Response`] to help working with errors +/// from Synapse. +#[async_trait] +pub(crate) trait SynapseResponseExt: Sized { + async fn error_for_synapse_error(self) -> Result; +} + +#[async_trait] +impl SynapseResponseExt for reqwest::Response { + async fn error_for_synapse_error(self) -> Result { + match self.error_for_status_ref() { + Ok(_response) => Ok(self), + Err(source) => { + let synapse_error = self.json().await.ok(); + Err(Error { + synapse_error, + source, + }) + } + } + } +} diff --git a/matrix-authentication-service/crates/matrix-synapse/src/legacy.rs b/matrix-authentication-service/crates/matrix-synapse/src/legacy.rs new file mode 100644 index 00000000..b93298ce --- /dev/null +++ b/matrix-authentication-service/crates/matrix-synapse/src/legacy.rs @@ -0,0 +1,688 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::HashSet, time::Duration}; + +use anyhow::{Context, bail}; +use http::{Method, StatusCode}; +use mas_http::RequestBuilderExt as _; +use mas_matrix::{HomeserverConnection, MatrixUser, ProvisionRequest}; +use serde::{Deserialize, Serialize}; +use tracing::debug; +use url::Url; + +use crate::error::{M_EXCLUSIVE, M_INVALID_USERNAME, M_USER_IN_USE, SynapseResponseExt}; + +static SYNAPSE_AUTH_PROVIDER: &str = "oauth-delegated"; + +#[derive(Clone)] +pub struct SynapseConnection { + homeserver: String, + endpoint: Url, + access_token: String, + http_client: reqwest::Client, +} + +impl SynapseConnection { + #[must_use] + pub fn new( + homeserver: String, + endpoint: Url, + access_token: String, + http_client: reqwest::Client, + ) -> Self { + Self { + homeserver, + endpoint, + access_token, + http_client, + } + } + + fn builder(&self, method: Method, url: &str) -> reqwest::RequestBuilder { + self.http_client + .request( + method, + self.endpoint + .join(url) + .map(String::from) + .unwrap_or_default(), + ) + .bearer_auth(&self.access_token) + } + + fn post(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::POST, url) + } + + fn get(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::GET, url) + } + + fn put(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::PUT, url) + } + + fn delete(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::DELETE, url) + } +} + +#[derive(Serialize, Deserialize)] +struct ExternalID { + auth_provider: String, + external_id: String, +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +enum ThreePIDMedium { + Email, + Msisdn, +} + +#[derive(Serialize, Deserialize)] +struct ThreePID { + medium: ThreePIDMedium, + address: String, +} + +#[derive(Default, Serialize, Deserialize)] +struct SynapseUser { + #[serde( + default, + rename = "displayname", + skip_serializing_if = "Option::is_none" + )] + display_name: Option, + + 
#[serde(default, skip_serializing_if = "Option::is_none")] + avatar_url: Option, + + #[serde(default, rename = "threepids", skip_serializing_if = "Option::is_none")] + three_pids: Option>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + external_ids: Option>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + deactivated: Option, +} + +#[derive(Deserialize)] +struct SynapseDeviceListResponse { + devices: Vec, +} + +#[derive(Serialize, Deserialize)] +struct SynapseDevice { + device_id: String, + + #[serde(default, skip_serializing_if = "Option::is_none")] + dehydrated: Option, +} + +#[derive(Serialize)] +struct SynapseUpdateDeviceRequest<'a> { + display_name: Option<&'a str>, +} + +#[derive(Serialize)] +struct SynapseDeleteDevicesRequest { + devices: Vec, +} + +#[derive(Serialize)] +struct SetDisplayNameRequest<'a> { + displayname: &'a str, +} + +#[derive(Serialize)] +struct SynapseDeactivateUserRequest { + erase: bool, +} + +#[derive(Serialize)] +struct SynapseAllowCrossSigningResetRequest {} + +/// Response body of +/// `/_synapse/admin/v1/username_available?username={localpart}` +#[derive(Deserialize)] +struct UsernameAvailableResponse { + available: bool, +} + +#[async_trait::async_trait] +impl HomeserverConnection for SynapseConnection { + fn homeserver(&self) -> &str { + &self.homeserver + } + + #[tracing::instrument(name = "homeserver.verify_token", skip_all, err(Debug))] + async fn verify_token(&self, token: &str) -> Result { + Ok(self.access_token == token) + } + + #[tracing::instrument( + name = "homeserver.query_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn query_user(&self, localpart: &str) -> Result { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + + let response = self + .get(&format!("_synapse/admin/v2/users/{encoded_mxid}")) + .send_traced() + .await + .context("Failed to query user from 
Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while querying user from Synapse")?; + + let body: SynapseUser = response + .json() + .await + .context("Failed to deserialize response while querying user from Synapse")?; + + Ok(MatrixUser { + displayname: body.display_name, + avatar_url: body.avatar_url, + deactivated: body.deactivated.unwrap_or(false), + }) + } + + #[tracing::instrument( + name = "homeserver.is_localpart_available", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn is_localpart_available(&self, localpart: &str) -> Result { + // Synapse will give us a M_UNKNOWN error if the localpart is not ASCII, + // so we bail out early + if !localpart.is_ascii() { + return Ok(false); + } + + let localpart = urlencoding::encode(localpart); + + let response = self + .get(&format!( + "_synapse/admin/v1/username_available?username={localpart}" + )) + .send_traced() + .await + .context("Failed to query localpart availability from Synapse")?; + + match response.error_for_synapse_error().await { + Ok(resp) => { + let response: UsernameAvailableResponse = resp.json().await.context( + "Unexpected response while querying localpart availability from Synapse", + )?; + + Ok(response.available) + } + + Err(err) + if err.errcode() == Some(M_INVALID_USERNAME) + || err.errcode() == Some(M_USER_IN_USE) + || err.errcode() == Some(M_EXCLUSIVE) => + { + debug!( + error = &err as &dyn std::error::Error, + "Localpart is not available" + ); + Ok(false) + } + + Err(err) => Err(err).context("Failed to query localpart availability from Synapse"), + } + } + + #[tracing::instrument( + name = "homeserver.provision_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = request.localpart(), + user.id = request.sub(), + ), + err(Debug), + )] + async fn provision_user(&self, request: &ProvisionRequest) -> Result { + 
let mut body = SynapseUser { + external_ids: Some(vec![ExternalID { + auth_provider: SYNAPSE_AUTH_PROVIDER.to_owned(), + external_id: request.sub().to_owned(), + }]), + ..SynapseUser::default() + }; + + request + .on_displayname(|displayname| { + body.display_name = Some(displayname.unwrap_or_default().to_owned()); + }) + .on_avatar_url(|avatar_url| { + body.avatar_url = Some(avatar_url.unwrap_or_default().to_owned()); + }) + .on_emails(|emails| { + body.three_pids = Some( + emails + .unwrap_or_default() + .iter() + .map(|email| ThreePID { + medium: ThreePIDMedium::Email, + address: email.clone(), + }) + .collect(), + ); + }); + + let mxid = self.mxid(request.localpart()); + let encoded_mxid = urlencoding::encode(&mxid); + let response = self + .put(&format!("_synapse/admin/v2/users/{encoded_mxid}")) + .json(&body) + .send_traced() + .await + .context("Failed to provision user in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while provisioning user in Synapse")?; + + match response.status() { + StatusCode::CREATED => Ok(true), + StatusCode::OK => Ok(false), + code => bail!("Unexpected HTTP code while provisioning user in Synapse: {code}"), + } + } + + #[tracing::instrument( + name = "homeserver.upsert_device", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn upsert_device( + &self, + localpart: &str, + device_id: &str, + initial_display_name: Option<&str>, + ) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + + let response = self + .post(&format!("_synapse/admin/v2/users/{encoded_mxid}/devices")) + .json(&SynapseDevice { + device_id: device_id.to_owned(), + dehydrated: None, + }) + .send_traced() + .await + .context("Failed to create device in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + 
.context("Unexpected HTTP response while creating device in Synapse")?; + + if response.status() != StatusCode::CREATED { + bail!( + "Unexpected HTTP code while creating device in Synapse: {}", + response.status() + ); + } + + // It's annoying, but the POST endpoint doesn't let us set the display name + // of the device, so we have to do it manually. + if let Some(display_name) = initial_display_name { + self.update_device_display_name(localpart, device_id, display_name) + .await?; + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.update_device_display_name", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn update_device_display_name( + &self, + localpart: &str, + device_id: &str, + display_name: &str, + ) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + let device_id = urlencoding::encode(device_id); + let response = self + .put(&format!( + "_synapse/admin/v2/users/{encoded_mxid}/devices/{device_id}" + )) + .json(&SynapseUpdateDeviceRequest { + display_name: Some(display_name), + }) + .send_traced() + .await + .context("Failed to update device display name in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while updating device display name in Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while updating device display name in Synapse: {}", + response.status() + ); + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.delete_device", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = 
urlencoding::encode(&mxid); + let encoded_device_id = urlencoding::encode(device_id); + + let response = self + .delete(&format!( + "_synapse/admin/v2/users/{encoded_mxid}/devices/{encoded_device_id}" + )) + .send_traced() + .await + .context("Failed to delete device in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while deleting device in Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while deleting device in Synapse: {}", + response.status() + ); + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.sync_devices", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn sync_devices( + &self, + localpart: &str, + devices: HashSet, + ) -> Result<(), anyhow::Error> { + // Get the list of current devices + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + + let response = self + .get(&format!("_synapse/admin/v2/users/{encoded_mxid}/devices")) + .send_traced() + .await + .context("Failed to query devices from Synapse")?; + + let response = response.error_for_synapse_error().await?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while querying devices from Synapse: {}", + response.status() + ); + } + + let body: SynapseDeviceListResponse = response + .json() + .await + .context("Failed to parse response while querying devices from Synapse")?; + + let existing_devices: HashSet = body + .devices + .into_iter() + .filter(|d| d.dehydrated != Some(true)) + .map(|d| d.device_id) + .collect(); + + // First, delete all the devices that are not needed anymore + let to_delete = existing_devices.difference(&devices).cloned().collect(); + + let response = self + .post(&format!( + "_synapse/admin/v2/users/{encoded_mxid}/delete_devices" + )) + .json(&SynapseDeleteDevicesRequest { devices: to_delete }) + .send_traced() + 
.await + .context("Failed to delete devices from Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while deleting devices from Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while deleting devices from Synapse: {}", + response.status() + ); + } + + // Then, create the devices that are missing. There is no batching API to do + // this, so we do this sequentially, which is fine as the API is idempotent. + for device_id in devices.difference(&existing_devices) { + self.upsert_device(localpart, device_id, None).await?; + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.delete_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + erase = erase, + ), + err(Debug), + )] + async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + + let response = self + .post(&format!("_synapse/admin/v1/deactivate/{encoded_mxid}")) + .json(&SynapseDeactivateUserRequest { erase }) + // Deactivation can take a while, so we set a longer timeout + .timeout(Duration::from_secs(60 * 5)) + .send_traced() + .await + .context("Failed to deactivate user in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while deactivating user in Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while deactivating user in Synapse: {}", + response.status() + ); + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.reactivate_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn reactivate_user(&self, localpart: &str) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + let 
response = self + .put(&format!("_synapse/admin/v2/users/{encoded_mxid}")) + .json(&SynapseUser { + deactivated: Some(false), + ..SynapseUser::default() + }) + .send_traced() + .await + .context("Failed to reactivate user in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while reactivating user in Synapse")?; + + match response.status() { + StatusCode::CREATED | StatusCode::OK => Ok(()), + code => bail!("Unexpected HTTP code while reactivating user in Synapse: {code}",), + } + } + + #[tracing::instrument( + name = "homeserver.set_displayname", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.displayname = displayname, + ), + err(Debug), + )] + async fn set_displayname( + &self, + localpart: &str, + displayname: &str, + ) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + let response = self + .put(&format!( + "_matrix/client/v3/profile/{encoded_mxid}/displayname" + )) + .json(&SetDisplayNameRequest { displayname }) + .send_traced() + .await + .context("Failed to set displayname in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while setting displayname in Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while setting displayname in Synapse: {}", + response.status() + ); + } + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.unset_displayname", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Display), + )] + async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error> { + self.set_displayname(localpart, "").await + } + + #[tracing::instrument( + name = "homeserver.allow_cross_signing_reset", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + 
err(Debug), + )] + async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error> { + let mxid = self.mxid(localpart); + let encoded_mxid = urlencoding::encode(&mxid); + + let response = self + .post(&format!( + "_synapse/admin/v1/users/{encoded_mxid}/_allow_cross_signing_replacement_without_uia" + )) + .json(&SynapseAllowCrossSigningResetRequest {}) + .send_traced() + .await + .context("Failed to allow cross-signing reset in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while allowing cross-signing reset in Synapse")?; + + if response.status() != StatusCode::OK { + bail!( + "Unexpected HTTP code while allowing cross-signing reset in Synapse: {}", + response.status(), + ); + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/matrix-synapse/src/lib.rs b/matrix-authentication-service/crates/matrix-synapse/src/lib.rs new file mode 100644 index 00000000..062ecaa7 --- /dev/null +++ b/matrix-authentication-service/crates/matrix-synapse/src/lib.rs @@ -0,0 +1,11 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod error; +mod legacy; +mod modern; + +pub use self::{legacy::SynapseConnection as LegacySynapseConnection, modern::SynapseConnection}; diff --git a/matrix-authentication-service/crates/matrix-synapse/src/modern.rs b/matrix-authentication-service/crates/matrix-synapse/src/modern.rs new file mode 100644 index 00000000..3d70f52d --- /dev/null +++ b/matrix-authentication-service/crates/matrix-synapse/src/modern.rs @@ -0,0 +1,567 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::HashSet; + +use anyhow::Context as _; +use http::{Method, StatusCode}; +use mas_http::RequestBuilderExt; +use mas_matrix::{HomeserverConnection, MatrixUser, ProvisionRequest}; +use serde::{Deserialize, Serialize}; +use tracing::debug; +use url::Url; + +use crate::error::{M_EXCLUSIVE, M_INVALID_USERNAME, M_USER_IN_USE, SynapseResponseExt as _}; + +#[derive(Clone)] +pub struct SynapseConnection { + homeserver: String, + endpoint: Url, + access_token: String, + http_client: reqwest::Client, +} + +impl SynapseConnection { + #[must_use] + pub fn new( + homeserver: String, + endpoint: Url, + access_token: String, + http_client: reqwest::Client, + ) -> Self { + Self { + homeserver, + endpoint, + access_token, + http_client, + } + } + + fn builder(&self, method: Method, url: &str) -> reqwest::RequestBuilder { + self.http_client + .request( + method, + self.endpoint + .join(url) + .map(String::from) + .unwrap_or_default(), + ) + .bearer_auth(&self.access_token) + } + + fn post(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::POST, url) + } + + fn get(&self, url: &str) -> reqwest::RequestBuilder { + self.builder(Method::GET, url) + } +} + +#[async_trait::async_trait] +impl HomeserverConnection for SynapseConnection { + fn homeserver(&self) -> &str { + &self.homeserver + } + + #[tracing::instrument(name = "homeserver.verify_token", skip_all, err(Debug))] + async fn verify_token(&self, token: &str) -> Result { + Ok(self.access_token == token) + } + + #[tracing::instrument( + name = "homeserver.query_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn query_user(&self, localpart: &str) -> Result { + #[derive(Deserialize)] + #[allow(dead_code)] + struct Response { + user_id: String, + display_name: Option, + avatar_url: Option, + is_suspended: bool, + is_deactivated: bool, + } + + let encoded_localpart = urlencoding::encode(localpart); + let url = 
format!("_synapse/mas/query_user?localpart={encoded_localpart}"); + let response = self + .get(&url) + .send_traced() + .await + .context("Failed to query user from Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while querying user from Synapse")?; + + let body: Response = response + .json() + .await + .context("Failed to deserialize response while querying user from Synapse")?; + + Ok(MatrixUser { + displayname: body.display_name, + avatar_url: body.avatar_url, + deactivated: body.is_deactivated, + }) + } + + #[tracing::instrument( + name = "homeserver.provision_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = request.localpart(), + ), + err(Debug), + )] + async fn provision_user(&self, request: &ProvisionRequest) -> Result { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + #[serde(skip_serializing_if = "Option::is_none")] + set_displayname: Option, + #[serde(skip_serializing_if = "std::ops::Not::not")] + unset_displayname: bool, + #[serde(skip_serializing_if = "Option::is_none")] + set_avatar_url: Option, + #[serde(skip_serializing_if = "std::ops::Not::not")] + unset_avatar_url: bool, + #[serde(skip_serializing_if = "Option::is_none")] + set_emails: Option>, + #[serde(skip_serializing_if = "std::ops::Not::not")] + unset_emails: bool, + } + + let mut body = Request { + localpart: request.localpart(), + set_displayname: None, + unset_displayname: false, + set_avatar_url: None, + unset_avatar_url: false, + set_emails: None, + unset_emails: false, + }; + + request.on_displayname(|displayname| match displayname { + Some(name) => body.set_displayname = Some(name.to_owned()), + None => body.unset_displayname = true, + }); + + request.on_avatar_url(|avatar_url| match avatar_url { + Some(url) => body.set_avatar_url = Some(url.to_owned()), + None => body.unset_avatar_url = true, + }); + + request.on_emails(|emails| match emails { + Some(emails) 
=> body.set_emails = Some(emails.to_owned()), + None => body.unset_emails = true, + }); + + let response = self + .post("_synapse/mas/provision_user") + .json(&body) + .send_traced() + .await + .context("Failed to provision user in Synapse")?; + + let response = response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while provisioning user in Synapse")?; + + match response.status() { + StatusCode::CREATED => Ok(true), + StatusCode::OK => Ok(false), + code => { + anyhow::bail!("Unexpected HTTP code while provisioning user in Synapse: {code}") + } + } + } + + #[tracing::instrument( + name = "homeserver.is_localpart_available", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn is_localpart_available(&self, localpart: &str) -> Result { + // Synapse will give us an error if the localpart is not ASCII, so we bail out + // early + if !localpart.is_ascii() { + return Ok(false); + } + + let encoded_localpart = urlencoding::encode(localpart); + let url = format!("_synapse/mas/is_localpart_available?localpart={encoded_localpart}"); + let response = self + .get(&url) + .send_traced() + .await + .context("Failed to check localpart availability from Synapse")?; + + match response.error_for_synapse_error().await { + Ok(_resp) => Ok(true), + Err(err) + if err.errcode() == Some(M_INVALID_USERNAME) + || err.errcode() == Some(M_USER_IN_USE) + || err.errcode() == Some(M_EXCLUSIVE) => + { + debug!( + error = &err as &dyn std::error::Error, + "Localpart is not available" + ); + Ok(false) + } + + Err(err) => Err(err).context("Failed to query localpart availability from Synapse"), + } + } + + #[tracing::instrument( + name = "homeserver.upsert_device", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn upsert_device( + &self, + localpart: &str, + device_id: &str, + 
initial_display_name: Option<&str>, + ) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + device_id: &'a str, + #[serde(skip_serializing_if = "Option::is_none")] + display_name: Option<&'a str>, + } + + let body = Request { + localpart, + device_id, + display_name: initial_display_name, + }; + + let response = self + .post("_synapse/mas/upsert_device") + .json(&body) + .send_traced() + .await + .context("Failed to create device in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while creating device in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.update_device_display_name", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn update_device_display_name( + &self, + localpart: &str, + device_id: &str, + display_name: &str, + ) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + device_id: &'a str, + display_name: &'a str, + } + + let body = Request { + localpart, + device_id, + display_name, + }; + + let response = self + .post("_synapse/mas/update_device_display_name") + .json(&body) + .send_traced() + .await + .context("Failed to update device display name in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while updating device display name in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.delete_device", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_id = device_id, + ), + err(Debug), + )] + async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + device_id: &'a str, + } + + let body = Request { + localpart, + device_id, + }; + + let 
response = self + .post("_synapse/mas/delete_device") + .json(&body) + .send_traced() + .await + .context("Failed to delete device in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while deleting device in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.sync_devices", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.device_count = devices.len(), + ), + err(Debug), + )] + async fn sync_devices( + &self, + localpart: &str, + devices: HashSet, + ) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + devices: HashSet, + } + + let body = Request { localpart, devices }; + + let response = self + .post("_synapse/mas/sync_devices") + .json(&body) + .send_traced() + .await + .context("Failed to sync devices in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while syncing devices in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.delete_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + matrix.erase = erase, + ), + err(Debug), + )] + async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + erase: bool, + } + + let body = Request { localpart, erase }; + + let response = self + .post("_synapse/mas/delete_user") + .json(&body) + .send_traced() + .await + .context("Failed to delete user in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while deleting user in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.reactivate_user", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn reactivate_user(&self, localpart: &str) -> 
Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + } + + let body = Request { localpart }; + + let response = self + .post("_synapse/mas/reactivate_user") + .json(&body) + .send_traced() + .await + .context("Failed to reactivate user in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while reactivating user in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.set_displayname", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn set_displayname( + &self, + localpart: &str, + displayname: &str, + ) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + displayname: &'a str, + } + + let body = Request { + localpart, + displayname, + }; + + let response = self + .post("_synapse/mas/set_displayname") + .json(&body) + .send_traced() + .await + .context("Failed to set displayname in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while setting displayname in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.unset_displayname", + skip_all, + fields( + matrix.homeserver = self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + } + + let body = Request { localpart }; + + let response = self + .post("_synapse/mas/unset_displayname") + .json(&body) + .send_traced() + .await + .context("Failed to unset displayname in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while unsetting displayname in Synapse")?; + + Ok(()) + } + + #[tracing::instrument( + name = "homeserver.allow_cross_signing_reset", + skip_all, + fields( + matrix.homeserver = 
self.homeserver, + matrix.localpart = localpart, + ), + err(Debug), + )] + async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error> { + #[derive(Serialize)] + struct Request<'a> { + localpart: &'a str, + } + + let body = Request { localpart }; + + let response = self + .post("_synapse/mas/allow_cross_signing_reset") + .json(&body) + .send_traced() + .await + .context("Failed to allow cross-signing reset in Synapse")?; + + response + .error_for_synapse_error() + .await + .context("Unexpected HTTP response while allowing cross-signing reset in Synapse")?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/matrix/Cargo.toml b/matrix-authentication-service/crates/matrix/Cargo.toml new file mode 100644 index 00000000..a041fee5 --- /dev/null +++ b/matrix-authentication-service/crates/matrix/Cargo.toml @@ -0,0 +1,23 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-matrix" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +tokio.workspace = true +ruma-common.workspace = true diff --git a/matrix-authentication-service/crates/matrix/src/lib.rs b/matrix-authentication-service/crates/matrix/src/lib.rs new file mode 100644 index 00000000..f1fbe9c8 --- /dev/null +++ b/matrix-authentication-service/crates/matrix/src/lib.rs @@ -0,0 +1,556 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 

mod mock;
mod readonly;

use std::{collections::HashSet, sync::Arc};

use ruma_common::UserId;

pub use self::{
    mock::HomeserverConnection as MockHomeserverConnection, readonly::ReadOnlyHomeserverConnection,
};

/// State of a user as reported by the homeserver.
#[derive(Debug)]
pub struct MatrixUser {
    pub displayname: Option<String>,
    pub avatar_url: Option<String>,
    pub deactivated: bool,
}

// Tri-state action for a single provisionable field: leave it untouched,
// set it to a value, or clear it.
#[derive(Debug, Default)]
enum FieldAction<T> {
    #[default]
    DoNothing,
    Set(T),
    Unset,
}

/// A request to provision (create or update) a user on the homeserver.
/// Built with the fluent `set_*`/`unset_*` methods; consumed through the
/// `on_*` callbacks, which fire only when the field should change.
pub struct ProvisionRequest {
    localpart: String,
    sub: String,
    displayname: FieldAction<String>,
    avatar_url: FieldAction<String>,
    emails: FieldAction<Vec<String>>,
}

impl ProvisionRequest {
    /// Create a new [`ProvisionRequest`].
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to provision.
    /// * `sub` - The `sub` of the user, aka the internal ID.
    #[must_use]
    pub fn new(localpart: impl Into<String>, sub: impl Into<String>) -> Self {
        Self {
            localpart: localpart.into(),
            sub: sub.into(),
            displayname: FieldAction::DoNothing,
            avatar_url: FieldAction::DoNothing,
            emails: FieldAction::DoNothing,
        }
    }

    /// Get the `sub` of the user to provision, aka the internal ID.
    #[must_use]
    pub fn sub(&self) -> &str {
        &self.sub
    }

    /// Get the localpart of the user to provision.
    #[must_use]
    pub fn localpart(&self) -> &str {
        &self.localpart
    }

    /// Ask to set the displayname of the user.
    ///
    /// # Parameters
    ///
    /// * `displayname` - The displayname to set.
    #[must_use]
    pub fn set_displayname(mut self, displayname: String) -> Self {
        self.displayname = FieldAction::Set(displayname);
        self
    }

    /// Ask to unset the displayname of the user.
    #[must_use]
    pub fn unset_displayname(mut self) -> Self {
        self.displayname = FieldAction::Unset;
        self
    }

    /// Call the given callback if the displayname should be set or unset.
    ///
    /// # Parameters
    ///
    /// * `callback` - The callback to call.
    pub fn on_displayname<F>(&self, callback: F) -> &Self
    where
        F: FnOnce(Option<&str>),
    {
        // `Some(..)` means "set to this value", `None` means "unset";
        // `DoNothing` never invokes the callback.
        match &self.displayname {
            FieldAction::Unset => callback(None),
            FieldAction::Set(displayname) => callback(Some(displayname)),
            FieldAction::DoNothing => {}
        }

        self
    }

    /// Ask to set the avatar URL of the user.
    ///
    /// # Parameters
    ///
    /// * `avatar_url` - The avatar URL to set.
    #[must_use]
    pub fn set_avatar_url(mut self, avatar_url: String) -> Self {
        self.avatar_url = FieldAction::Set(avatar_url);
        self
    }

    /// Ask to unset the avatar URL of the user.
    #[must_use]
    pub fn unset_avatar_url(mut self) -> Self {
        self.avatar_url = FieldAction::Unset;
        self
    }

    /// Call the given callback if the avatar URL should be set or unset.
    ///
    /// # Parameters
    ///
    /// * `callback` - The callback to call.
    pub fn on_avatar_url<F>(&self, callback: F) -> &Self
    where
        F: FnOnce(Option<&str>),
    {
        match &self.avatar_url {
            FieldAction::Unset => callback(None),
            FieldAction::Set(avatar_url) => callback(Some(avatar_url)),
            FieldAction::DoNothing => {}
        }

        self
    }

    /// Ask to set the emails of the user.
    ///
    /// # Parameters
    ///
    /// * `emails` - The list of emails to set.
    #[must_use]
    pub fn set_emails(mut self, emails: Vec<String>) -> Self {
        self.emails = FieldAction::Set(emails);
        self
    }

    /// Ask to unset the emails of the user.
    #[must_use]
    pub fn unset_emails(mut self) -> Self {
        self.emails = FieldAction::Unset;
        self
    }

    /// Call the given callback if the emails should be set or unset.
    ///
    /// # Parameters
    ///
    /// * `callback` - The callback to call.
    pub fn on_emails<F>(&self, callback: F) -> &Self
    where
        F: FnOnce(Option<&[String]>),
    {
        match &self.emails {
            FieldAction::Unset => callback(None),
            FieldAction::Set(emails) => callback(Some(emails)),
            FieldAction::DoNothing => {}
        }

        self
    }
}

/// Abstraction over homeserver administration operations needed by MAS.
/// Implemented by the Synapse connection, the in-memory mock, and the
/// read-only wrapper.
#[async_trait::async_trait]
pub trait HomeserverConnection: Send + Sync {
    /// Get the homeserver URL.
    fn homeserver(&self) -> &str;

    /// Get the Matrix ID of the user with the given localpart.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user.
    fn mxid(&self, localpart: &str) -> String {
        format!("@{}:{}", localpart, self.homeserver())
    }

    /// Get the localpart of a Matrix ID if it has the right server name
    ///
    /// Returns [`None`] if the input isn't a valid MXID, or if the server name
    /// doesn't match
    ///
    /// # Parameters
    ///
    /// * `mxid` - The MXID of the user
    fn localpart<'a>(&self, mxid: &'a str) -> Option<&'a str> {
        let mxid = <&UserId>::try_from(mxid).ok()?;
        if mxid.server_name() != self.homeserver() {
            return None;
        }
        Some(mxid.localpart())
    }

    /// Verify a bearer token coming from the homeserver for homeserver to MAS
    /// interactions
    ///
    /// Returns `true` if the token is valid, `false` otherwise.
    ///
    /// # Parameters
    ///
    /// * `token` - The token to verify.
    ///
    /// # Errors
    ///
    /// Returns an error if the token failed to verify.
    async fn verify_token(&self, token: &str) -> Result<bool, anyhow::Error>;

    /// Query the state of a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to query.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the user does not
    /// exist.
    async fn query_user(&self, localpart: &str) -> Result<MatrixUser, anyhow::Error>;

    /// Provision a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `request` - a [`ProvisionRequest`] containing the details of the user
    ///   to provision.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the user could not
    /// be provisioned.
    async fn provision_user(&self, request: &ProvisionRequest) -> Result<bool, anyhow::Error>;

    /// Check whether a given username is available on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart to check.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable.
    async fn is_localpart_available(&self, localpart: &str) -> Result<bool, anyhow::Error>;

    /// Create a device for a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to create a device for.
    /// * `device_id` - The device ID to create.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the device could
    /// not be created.
    async fn upsert_device(
        &self,
        localpart: &str,
        device_id: &str,
        initial_display_name: Option<&str>,
    ) -> Result<(), anyhow::Error>;

    /// Update the display name of a device for a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to update a device for.
    /// * `device_id` - The device ID to update.
    /// * `display_name` - The new display name to set
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the device could
    /// not be updated.
    async fn update_device_display_name(
        &self,
        localpart: &str,
        device_id: &str,
        display_name: &str,
    ) -> Result<(), anyhow::Error>;

    /// Delete a device for a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to delete a device for.
    /// * `device_id` - The device ID to delete.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the device could
    /// not be deleted.
    async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error>;

    /// Sync the list of devices of a user with the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to sync the devices for.
    /// * `devices` - The list of devices to sync.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the devices could
    /// not be synced.
    async fn sync_devices(
        &self,
        localpart: &str,
        devices: HashSet<String>,
    ) -> Result<(), anyhow::Error>;

    /// Delete a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to delete.
    /// * `erase` - Whether to ask the homeserver to erase the user's data.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the user could not
    /// be deleted.
    async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error>;

    /// Reactivate a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to reactivate.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the user could not
    /// be reactivated.
    async fn reactivate_user(&self, localpart: &str) -> Result<(), anyhow::Error>;

    /// Set the displayname of a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to set the displayname for.
    /// * `displayname` - The displayname to set.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the displayname
    /// could not be set.
    async fn set_displayname(
        &self,
        localpart: &str,
        displayname: &str,
    ) -> Result<(), anyhow::Error>;

    /// Unset the displayname of a user on the homeserver.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to unset the displayname for.
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the displayname
    /// could not be unset.
    async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error>;

    /// Temporarily allow a user to reset their cross-signing keys.
    ///
    /// # Parameters
    ///
    /// * `localpart` - The localpart of the user to allow cross-signing key
    ///   reset
    ///
    /// # Errors
    ///
    /// Returns an error if the homeserver is unreachable or the cross-signing
    /// reset could not be allowed.
    async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error>;
}

// Blanket impl so a `&T` can be used wherever a connection is expected;
// every method simply delegates to the referenced connection.
#[async_trait::async_trait]
impl<T: HomeserverConnection + ?Sized> HomeserverConnection for &T {
    fn homeserver(&self) -> &str {
        (**self).homeserver()
    }

    async fn verify_token(&self, token: &str) -> Result<bool, anyhow::Error> {
        (**self).verify_token(token).await
    }

    async fn query_user(&self, localpart: &str) -> Result<MatrixUser, anyhow::Error> {
        (**self).query_user(localpart).await
    }

    async fn provision_user(&self, request: &ProvisionRequest) -> Result<bool, anyhow::Error> {
        (**self).provision_user(request).await
    }

    async fn is_localpart_available(&self, localpart: &str) -> Result<bool, anyhow::Error> {
        (**self).is_localpart_available(localpart).await
    }

    async fn upsert_device(
        &self,
        localpart: &str,
        device_id: &str,
        initial_display_name: Option<&str>,
    ) -> Result<(), anyhow::Error> {
        (**self)
            .upsert_device(localpart, device_id, initial_display_name)
            .await
    }

    async fn update_device_display_name(
        &self,
        localpart: &str,
        device_id: &str,
        display_name: &str,
    ) -> Result<(), anyhow::Error> {
        (**self)
            .update_device_display_name(localpart, device_id, display_name)
            .await
    }

    async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error> {
        (**self).delete_device(localpart, device_id).await
    }

    async fn sync_devices(
        &self,
        localpart: &str,
        devices: HashSet<String>,
    ) -> Result<(), anyhow::Error> {
        (**self).sync_devices(localpart, devices).await
    }

    async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error> {
        (**self).delete_user(localpart, erase).await
    }

    async fn reactivate_user(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).reactivate_user(localpart).await
    }

    async fn set_displayname(
        &self,
        localpart: &str,
        displayname: &str,
    ) -> Result<(), anyhow::Error> {
        (**self).set_displayname(localpart, displayname).await
    }

    async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).unset_displayname(localpart).await
    }

    async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).allow_cross_signing_reset(localpart).await
    }
}

// Implement for Arc<T> where T: HomeserverConnection
#[async_trait::async_trait]
impl<T: HomeserverConnection + ?Sized> HomeserverConnection for Arc<T> {
    fn homeserver(&self) -> &str {
        (**self).homeserver()
    }

    async fn verify_token(&self, token: &str) -> Result<bool, anyhow::Error> {
        (**self).verify_token(token).await
    }

    async fn query_user(&self, localpart: &str) -> Result<MatrixUser, anyhow::Error> {
        (**self).query_user(localpart).await
    }

    async fn provision_user(&self, request: &ProvisionRequest) -> Result<bool, anyhow::Error> {
        (**self).provision_user(request).await
    }

    async fn is_localpart_available(&self, localpart: &str) -> Result<bool, anyhow::Error> {
        (**self).is_localpart_available(localpart).await
    }

    async fn upsert_device(
        &self,
        localpart: &str,
        device_id: &str,
        initial_display_name: Option<&str>,
    ) -> Result<(), anyhow::Error> {
        (**self)
            .upsert_device(localpart, device_id, initial_display_name)
            .await
    }

    async fn update_device_display_name(
        &self,
        localpart: &str,
        device_id: &str,
        display_name: &str,
    ) -> Result<(), anyhow::Error> {
        (**self)
            .update_device_display_name(localpart, device_id, display_name)
            .await
    }

    async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error> {
        (**self).delete_device(localpart, device_id).await
    }

    async fn sync_devices(
        &self,
        localpart: &str,
        devices: HashSet<String>,
    ) -> Result<(), anyhow::Error> {
        (**self).sync_devices(localpart, devices).await
    }

    async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error> {
        (**self).delete_user(localpart, erase).await
    }

    async fn reactivate_user(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).reactivate_user(localpart).await
    }

    async fn set_displayname(
        &self,
        localpart: &str,
        displayname: &str,
    ) -> Result<(), anyhow::Error> {
        (**self).set_displayname(localpart, displayname).await
    }

    async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).unset_displayname(localpart).await
    }

    async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error> {
        (**self).allow_cross_signing_reset(localpart).await
    }
}
diff --git a/matrix-authentication-service/crates/matrix/src/mock.rs b/matrix-authentication-service/crates/matrix/src/mock.rs
new file mode 100644
index 00000000..4180597e
--- /dev/null
+++ b/matrix-authentication-service/crates/matrix/src/mock.rs
@@ -0,0 +1,282 @@
// Copyright 2024, 2025 New Vector Ltd.
// Copyright 2023, 2024 The Matrix.org Foundation C.I.C.
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.

use std::collections::{HashMap, HashSet};

use anyhow::Context;
use async_trait::async_trait;
use tokio::sync::RwLock;

use crate::{MatrixUser, ProvisionRequest};

// In-memory record of a provisioned user, keyed by MXID in the mock's map.
struct MockUser {
    sub: String,
    avatar_url: Option<String>,
    displayname: Option<String>,
    devices: HashSet<String>,
    emails: Option<Vec<String>>,
    cross_signing_reset_allowed: bool,
    deactivated: bool,
}

/// A mock implementation of a [`HomeserverConnection`], which never fails and
/// doesn't do anything.
pub struct HomeserverConnection {
    homeserver: String,
    // Users keyed by full MXID; guarded by an async RwLock so the mock is
    // usable from concurrent test tasks.
    users: RwLock<HashMap<String, MockUser>>,
    reserved_localparts: RwLock<HashSet<&'static str>>,
}

impl HomeserverConnection {
    /// A valid bearer token that will be accepted by
    /// [`crate::HomeserverConnection::verify_token`].
    pub const VALID_BEARER_TOKEN: &str = "mock_homeserver_bearer_token";

    /// Create a new mock connection.
    pub fn new<H>(homeserver: H) -> Self
    where
        H: Into<String>,
    {
        Self {
            homeserver: homeserver.into(),
            users: RwLock::new(HashMap::new()),
            reserved_localparts: RwLock::new(HashSet::new()),
        }
    }

    // Mark a localpart as reserved, so is_localpart_available reports false
    // even though no user exists with it.
    pub async fn reserve_localpart(&self, localpart: &'static str) {
        self.reserved_localparts.write().await.insert(localpart);
    }
}

#[async_trait]
impl crate::HomeserverConnection for HomeserverConnection {
    fn homeserver(&self) -> &str {
        &self.homeserver
    }

    async fn verify_token(&self, token: &str) -> Result<bool, anyhow::Error> {
        Ok(token == Self::VALID_BEARER_TOKEN)
    }

    async fn query_user(&self, localpart: &str) -> Result<MatrixUser, anyhow::Error> {
        let mxid = self.mxid(localpart);
        let users = self.users.read().await;
        let user = users.get(&mxid).context("User not found")?;
        Ok(MatrixUser {
            displayname: user.displayname.clone(),
            avatar_url: user.avatar_url.clone(),
            deactivated: user.deactivated,
        })
    }

    async fn provision_user(&self, request: &ProvisionRequest) -> Result<bool, anyhow::Error> {
        let mut users = self.users.write().await;
        let mxid = self.mxid(request.localpart());
        // Remember whether the entry existed before, to mirror Synapse's
        // created-vs-updated return value.
        let inserted = !users.contains_key(&mxid);
        let user = users.entry(mxid).or_insert(MockUser {
            sub: request.sub().to_owned(),
            avatar_url: None,
            displayname: None,
            devices: HashSet::new(),
            emails: None,
            cross_signing_reset_allowed: false,
            deactivated: false,
        });

        // The same localpart may not be re-provisioned for a different
        // internal user.
        anyhow::ensure!(
            user.sub == request.sub(),
            "User already provisioned with different sub"
        );

        request.on_emails(|emails| {
            user.emails = emails.map(ToOwned::to_owned);
        });

        request.on_displayname(|displayname| {
            user.displayname = displayname.map(ToOwned::to_owned);
        });

        request.on_avatar_url(|avatar_url| {
            user.avatar_url = avatar_url.map(ToOwned::to_owned);
        });

        Ok(inserted)
    }

    async fn is_localpart_available(&self, localpart: &str) -> Result<bool, anyhow::Error> {
        if self.reserved_localparts.read().await.contains(localpart) {
            return Ok(false);
        }

        let mxid = self.mxid(localpart);
        let users = self.users.read().await;
        Ok(!users.contains_key(&mxid))
    }

    async fn upsert_device(
        &self,
        localpart: &str,
        device_id: &str,
        _initial_display_name: Option<&str>,
    ) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.devices.insert(device_id.to_owned());
        Ok(())
    }

    async fn update_device_display_name(
        &self,
        localpart: &str,
        device_id: &str,
        _display_name: &str,
    ) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        // The mock doesn't store display names; it only checks existence.
        user.devices.get(device_id).context("Device not found")?;
        Ok(())
    }

    async fn delete_device(&self, localpart: &str, device_id: &str) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        // Deleting a non-existent device is deliberately not an error.
        user.devices.remove(device_id);
        Ok(())
    }

    async fn sync_devices(
        &self,
        localpart: &str,
        devices: HashSet<String>,
    ) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.devices = devices;
        Ok(())
    }

    async fn delete_user(&self, localpart: &str, erase: bool) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.devices.clear();
        user.emails = None;
        user.deactivated = true;
        // Profile data is only wiped when erasure was requested.
        if erase {
            user.avatar_url = None;
            user.displayname = None;
        }

        Ok(())
    }

    async fn reactivate_user(&self, localpart: &str) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.deactivated = false;

        Ok(())
    }

    async fn set_displayname(
        &self,
        localpart: &str,
        displayname: &str,
    ) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.displayname = Some(displayname.to_owned());
        Ok(())
    }

    async fn unset_displayname(&self, localpart: &str) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.displayname = None;
        Ok(())
    }

    async fn allow_cross_signing_reset(&self, localpart: &str) -> Result<(), anyhow::Error> {
        let mxid = self.mxid(localpart);
        let mut users = self.users.write().await;
        let user = users.get_mut(&mxid).context("User not found")?;
        user.cross_signing_reset_allowed = true;
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::HomeserverConnection as _;

    #[tokio::test]
    async fn test_mock_connection() {
        let conn = HomeserverConnection::new("example.org");

        let mxid = "@test:example.org";
        let device = "test";
        assert_eq!(conn.homeserver(), "example.org");
        assert_eq!(conn.mxid("test"), mxid);

        // Operations on a user that doesn't exist yet must fail.
        assert!(conn.query_user("test").await.is_err());
        assert!(conn.upsert_device("test", device, None).await.is_err());
        assert!(conn.delete_device("test", device).await.is_err());

        let request = ProvisionRequest::new("test", "test")
            .set_displayname("Test User".into())
            .set_avatar_url("mxc://example.org/1234567890".into())
            .set_emails(vec!["test@example.org".to_owned()]);

        let inserted = conn.provision_user(&request).await.unwrap();
        assert!(inserted);

        let user = conn.query_user("test").await.unwrap();
        assert_eq!(user.displayname, Some("Test User".into()));
        assert_eq!(user.avatar_url, Some("mxc://example.org/1234567890".into()));

        // Set the displayname again
        assert!(conn.set_displayname("test", "John").await.is_ok());

        let user = conn.query_user("test").await.unwrap();
        assert_eq!(user.displayname, Some("John".into()));

        // Unset the displayname
        assert!(conn.unset_displayname("test").await.is_ok());

        let user = conn.query_user("test").await.unwrap();
        assert_eq!(user.displayname, None);

        // Deleting a non-existent device should not fail
        assert!(conn.delete_device("test", device).await.is_ok());

        // Create the device
        assert!(conn.upsert_device("test", device, None).await.is_ok());
        // Create the same device again
        assert!(conn.upsert_device("test", device, None).await.is_ok());

        // XXX: there is no API to query devices yet in the trait
        // Delete the device
        assert!(conn.delete_device("test", device).await.is_ok());

        // The user we just created should be not available
        assert!(!conn.is_localpart_available("test").await.unwrap());
        // But another user should be
        assert!(conn.is_localpart_available("alice").await.unwrap());

        // Reserve the localpart, it should not be available anymore
        conn.reserve_localpart("alice").await;
        assert!(!conn.is_localpart_available("alice").await.unwrap());
    }
}
diff --git a/matrix-authentication-service/crates/matrix/src/readonly.rs b/matrix-authentication-service/crates/matrix/src/readonly.rs
new file mode 100644
index 00000000..590583bf
--- /dev/null
+++ b/matrix-authentication-service/crates/matrix/src/readonly.rs
@@ -0,0 +1,100 @@
// Copyright 2025 New Vector Ltd.
//
// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
// Please see LICENSE files in the repository root for full details.
+
+use std::collections::HashSet;
+
+use crate::{HomeserverConnection, MatrixUser, ProvisionRequest};
+
+/// A wrapper around a [`HomeserverConnection`] that only allows read
+/// operations.
+pub struct ReadOnlyHomeserverConnection<C> {
+    inner: C,
+}
+
+impl<C> ReadOnlyHomeserverConnection<C> {
+    pub fn new(inner: C) -> Self
+    where
+        C: HomeserverConnection,
+    {
+        Self { inner }
+    }
+}
+
+#[async_trait::async_trait]
+impl<C: HomeserverConnection> HomeserverConnection for ReadOnlyHomeserverConnection<C> {
+    fn homeserver(&self) -> &str {
+        self.inner.homeserver()
+    }
+
+    async fn verify_token(&self, token: &str) -> Result<bool, anyhow::Error> {
+        self.inner.verify_token(token).await
+    }
+
+    async fn query_user(&self, localpart: &str) -> Result<MatrixUser, anyhow::Error> {
+        self.inner.query_user(localpart).await
+    }
+
+    async fn provision_user(&self, _request: &ProvisionRequest) -> Result<bool, anyhow::Error> {
+        anyhow::bail!("Provisioning is not supported in read-only mode");
+    }
+
+    async fn is_localpart_available(&self, localpart: &str) -> Result<bool, anyhow::Error> {
+        self.inner.is_localpart_available(localpart).await
+    }
+
+    async fn upsert_device(
+        &self,
+        _localpart: &str,
+        _device_id: &str,
+        _initial_display_name: Option<&str>,
+    ) -> Result<(), anyhow::Error> {
+        anyhow::bail!("Device creation is not supported in read-only mode");
+    }
+
+    async fn update_device_display_name(
+        &self,
+        _localpart: &str,
+        _device_id: &str,
+        _display_name: &str,
+    ) -> Result<(), anyhow::Error> {
+        anyhow::bail!("Device display name update is not supported in read-only mode");
+    }
+
+    async fn delete_device(&self, _localpart: &str, _device_id: &str) -> Result<(), anyhow::Error> {
+        anyhow::bail!("Device deletion is not supported in read-only mode");
+    }
+
+    async fn sync_devices(
+        &self,
+        _localpart: &str,
+        _devices: HashSet<String>,
+    ) -> Result<(), anyhow::Error> {
+        anyhow::bail!("Device synchronization is not supported in read-only mode");
+    }
+
+    async fn delete_user(&self, _localpart: &str, _erase: bool) -> Result<(), anyhow::Error> {
+        anyhow::bail!("User deletion is not supported in read-only mode");
+    }
+
+    async fn reactivate_user(&self, _localpart: &str) -> Result<(), anyhow::Error> {
+        anyhow::bail!("User reactivation is not supported in read-only mode");
+    }
+
+    async fn set_displayname(
+        &self,
+        _localpart: &str,
+        _displayname: &str,
+    ) -> Result<(), anyhow::Error> {
+        anyhow::bail!("User displayname update is not supported in read-only mode");
+    }
+
+    async fn unset_displayname(&self, _localpart: &str) -> Result<(), anyhow::Error> {
+        anyhow::bail!("User displayname update is not supported in read-only mode");
+    }
+
+    async fn allow_cross_signing_reset(&self, _localpart: &str) -> Result<(), anyhow::Error> {
+        anyhow::bail!("Allowing cross-signing reset is not supported in read-only mode");
+    }
+}
diff --git a/matrix-authentication-service/crates/oauth2-types/Cargo.toml b/matrix-authentication-service/crates/oauth2-types/Cargo.toml
new file mode 100644
index 00000000..2432e34f
--- /dev/null
+++ b/matrix-authentication-service/crates/oauth2-types/Cargo.toml
@@ -0,0 +1,37 @@
+# Copyright 2025 New Vector Ltd.
+#
+# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+# Please see LICENSE files in the repository root for full details.
+
+[package]
+name = "oauth2-types"
+description = "OAuth 2.0 types used by the Matrix Authentication Service"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+homepage.workspace = true
+repository.workspace = true
+publish.workspace = true
+
+[lints]
+workspace = true
+
+[dependencies]
+base64ct.workspace = true
+chrono.workspace = true
+indexmap.workspace = true
+language-tags.workspace = true
+serde_json.workspace = true
+serde_with.workspace = true
+serde.workspace = true
+sha2.workspace = true
+thiserror.workspace = true
+url.workspace = true
+
+mas-iana.workspace = true
+mas-jose.workspace = true
+
+[dev-dependencies]
+assert_matches.workspace = true
+insta.workspace = true
diff --git a/matrix-authentication-service/crates/oauth2-types/src/errors.rs b/matrix-authentication-service/crates/oauth2-types/src/errors.rs
new file mode 100644
index 00000000..b3077636
--- /dev/null
+++ b/matrix-authentication-service/crates/oauth2-types/src/errors.rs
@@ -0,0 +1,619 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2021-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+//! Error types returned by an authorization server.
+
+use std::borrow::Cow;
+
+use serde::{Deserialize, Serialize};
+use serde_with::{DeserializeFromStr, SerializeDisplay};
+
+/// A client error returned by an authorization server.
+///
+/// To construct this with a default description for the error code, use its
+/// `From` implementation.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct ClientError {
+    /// The error code.
+    pub error: ClientErrorCode,
+
+    /// A human-readable description of the error.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error_description: Option<Cow<'static, str>>,
+}
+
+impl ClientError {
+    /// Creates a new `ClientError` with the given error code and description.
+    #[must_use]
+    pub const fn new(error: ClientErrorCode, error_description: &'static str) -> Self {
+        Self {
+            error,
+            error_description: Some(Cow::Borrowed(error_description)),
+        }
+    }
+
+    /// Changes the description of this `ClientError` with the given `String`.
+    #[must_use]
+    pub fn with_description(mut self, description: String) -> Self {
+        self.error_description = Some(Cow::Owned(description));
+        self
+    }
+}
+
+impl From<ClientErrorCode> for ClientError {
+    fn from(error: ClientErrorCode) -> Self {
+        let desc = error.default_description();
+        Self::new(error, desc)
+    }
+}
+
+/// Client error codes defined in OAuth2.0, OpenID Connect and their extensions.
+#[derive(Debug, Clone, PartialEq, Eq, SerializeDisplay, DeserializeFromStr)]
+pub enum ClientErrorCode {
+    /// `invalid_request`
+    ///
+    /// The request is missing a required parameter, includes an invalid
+    /// parameter value, includes a parameter more than once, or is otherwise
+    /// malformed.
+    ///
+    /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-5.2).
+    InvalidRequest,
+
+    /// `invalid_client`
+    ///
+    /// Client authentication failed (e.g., unknown client, no client
+    /// authentication included, or unsupported authentication method).
+    ///
+    /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-5.2).
+    InvalidClient,
+
+    /// `invalid_grant`
+    ///
+    /// The provided authorization grant (e.g., authorization code, resource
+    /// owner credentials) or refresh token is invalid, expired, revoked, does
+    /// not match the redirection URI used in the authorization request, or was
+    /// issued to another client.
+    ///
+    /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-5.2).
+    InvalidGrant,
+
+    /// `unauthorized_client`
+    ///
+    /// The authenticated client is not authorized to use this authorization
+    /// grant type.
+    ///
+    /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-5.2).
+ UnauthorizedClient, + + /// `unsupported_grant_type` + /// + /// The authorization grant type is not supported by the authorization + /// server. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-5.2). + UnsupportedGrantType, + + /// `access_denied` + /// + /// The resource owner or authorization server denied the request. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2.1). + AccessDenied, + + /// `unsupported_response_type` + /// + /// The authorization server does not support obtaining an authorization + /// code using this method. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2.1). + UnsupportedResponseType, + + /// `invalid_scope` + /// + /// The requested scope is invalid, unknown, malformed, or exceeds the scope + /// granted by the resource owner. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2.1). + InvalidScope, + + /// `server_error` + /// + /// The authorization server encountered an unexpected condition that + /// prevented it from fulfilling the request. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2.1). + ServerError, + + /// `temporarily_unavailable` + /// + /// The authorization server is currently unable to handle the request due + /// to a temporary overloading or maintenance of the server. + /// + /// From [RFC6749](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2.1). + TemporarilyUnavailable, + + /// `interaction_required` + /// + /// The authorization server requires end-user interaction of some form to + /// proceed. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + InteractionRequired, + + /// `login_required` + /// + /// The authorization server requires end-user authentication. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). 
+ LoginRequired, + + /// `account_selection_required` + /// + /// The end-user is required to select a session at the authorization + /// server. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + AccountSelectionRequired, + + /// `consent_required` + /// + /// The authorization server requires end-user consent. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + ConsentRequired, + + /// `invalid_request_uri` + /// + /// The `request_uri` in the authorization request returns an error or + /// contains invalid data. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + InvalidRequestUri, + + /// `invalid_request_object` + /// + /// The request parameter contains an invalid request object. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + InvalidRequestObject, + + /// `request_not_supported` + /// + /// The authorization server does not support use of the `request` + /// parameter. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + RequestNotSupported, + + /// `request_uri_not_supported` + /// + /// The authorization server does not support use of the `request_uri` + /// parameter. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + RequestUriNotSupported, + + /// `registration_not_supported` + /// + /// The authorization server does not support use of the `registration` + /// parameter. + /// + /// From [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthError). + RegistrationNotSupported, + + /// `invalid_redirect_uri` + /// + /// The value of one or more redirection URIs is invalid. + /// + /// From [RFC7591](https://www.rfc-editor.org/rfc/rfc7591#section-3.2.2). 
+ InvalidRedirectUri, + + /// `invalid_client_metadata` + /// + /// The value of one of the client metadata fields is invalid and the server + /// has rejected this request. + /// + /// From [RFC7591](https://www.rfc-editor.org/rfc/rfc7591#section-3.2.2). + InvalidClientMetadata, + + /// `authorization_pending` + /// + /// The authorization request is still pending as the end user hasn't yet + /// completed the user-interaction steps. + /// + /// The client should repeat the access token request to the token endpoint + /// (a process known as polling). Before each new request, the client + /// must wait at least the number of seconds specified by the `interval` + /// parameter of the device authorization response, or 5 seconds if none was + /// provided, and respect any increase in the polling interval required + /// by the [`ClientErrorCode::SlowDown`] error. + /// + /// From [RFC8628](https://www.rfc-editor.org/rfc/rfc8628#section-3.5). + AuthorizationPending, + + /// `slow_down` + /// + /// A variant of [`ClientErrorCode::AuthorizationPending`], the + /// authorization request is still pending and polling should continue, + /// but the interval must be increased by 5 seconds for this and all + /// subsequent requests. + /// + /// From [RFC8628](https://www.rfc-editor.org/rfc/rfc8628#section-3.5). + SlowDown, + + /// `expired_token` + /// + /// The `device_code` has expired, and the device authorization session has + /// concluded. + /// + /// The client may commence a new device authorization request but should + /// wait for user interaction before restarting to avoid unnecessary + /// polling. + /// + /// From [RFC8628](https://www.rfc-editor.org/rfc/rfc8628#section-3.5). + ExpiredToken, + + /// `unsupported_token_type` + /// + /// The authorization server does not support the revocation of the + /// presented token type. That is, the client tried to revoke an access + /// token on a server not supporting this feature. 
+ /// + /// From [RFC7009](https://www.rfc-editor.org/rfc/rfc7009#section-2.2.1). + UnsupportedTokenType, + + /// Another error code. + Unknown(String), +} + +impl core::fmt::Display for ClientErrorCode { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + ClientErrorCode::InvalidRequest => f.write_str("invalid_request"), + ClientErrorCode::InvalidClient => f.write_str("invalid_client"), + ClientErrorCode::InvalidGrant => f.write_str("invalid_grant"), + ClientErrorCode::UnauthorizedClient => f.write_str("unauthorized_client"), + ClientErrorCode::UnsupportedGrantType => f.write_str("unsupported_grant_type"), + ClientErrorCode::AccessDenied => f.write_str("access_denied"), + ClientErrorCode::UnsupportedResponseType => f.write_str("unsupported_response_type"), + ClientErrorCode::InvalidScope => f.write_str("invalid_scope"), + ClientErrorCode::ServerError => f.write_str("server_error"), + ClientErrorCode::TemporarilyUnavailable => f.write_str("temporarily_unavailable"), + ClientErrorCode::InteractionRequired => f.write_str("interaction_required"), + ClientErrorCode::LoginRequired => f.write_str("login_required"), + ClientErrorCode::AccountSelectionRequired => f.write_str("account_selection_required"), + ClientErrorCode::ConsentRequired => f.write_str("consent_required"), + ClientErrorCode::InvalidRequestUri => f.write_str("invalid_request_uri"), + ClientErrorCode::InvalidRequestObject => f.write_str("invalid_request_object"), + ClientErrorCode::RequestNotSupported => f.write_str("request_not_supported"), + ClientErrorCode::RequestUriNotSupported => f.write_str("request_uri_not_supported"), + ClientErrorCode::RegistrationNotSupported => f.write_str("registration_not_supported"), + ClientErrorCode::InvalidRedirectUri => f.write_str("invalid_redirect_uri"), + ClientErrorCode::InvalidClientMetadata => f.write_str("invalid_client_metadata"), + ClientErrorCode::AuthorizationPending => f.write_str("authorization_pending"), + 
ClientErrorCode::SlowDown => f.write_str("slow_down"), + ClientErrorCode::ExpiredToken => f.write_str("expired_token"), + ClientErrorCode::UnsupportedTokenType => f.write_str("unsupported_token_type"), + ClientErrorCode::Unknown(value) => f.write_str(value), + } + } +} + +impl core::str::FromStr for ClientErrorCode { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "invalid_request" => Ok(ClientErrorCode::InvalidRequest), + "invalid_client" => Ok(ClientErrorCode::InvalidClient), + "invalid_grant" => Ok(ClientErrorCode::InvalidGrant), + "unauthorized_client" => Ok(ClientErrorCode::UnauthorizedClient), + "unsupported_grant_type" => Ok(ClientErrorCode::UnsupportedGrantType), + "access_denied" => Ok(ClientErrorCode::AccessDenied), + "unsupported_response_type" => Ok(ClientErrorCode::UnsupportedResponseType), + "invalid_scope" => Ok(ClientErrorCode::InvalidScope), + "server_error" => Ok(ClientErrorCode::ServerError), + "temporarily_unavailable" => Ok(ClientErrorCode::TemporarilyUnavailable), + "interaction_required" => Ok(ClientErrorCode::InteractionRequired), + "login_required" => Ok(ClientErrorCode::LoginRequired), + "account_selection_required" => Ok(ClientErrorCode::AccountSelectionRequired), + "consent_required" => Ok(ClientErrorCode::ConsentRequired), + "invalid_request_uri" => Ok(ClientErrorCode::InvalidRequestUri), + "invalid_request_object" => Ok(ClientErrorCode::InvalidRequestObject), + "request_not_supported" => Ok(ClientErrorCode::RequestNotSupported), + "request_uri_not_supported" => Ok(ClientErrorCode::RequestUriNotSupported), + "registration_not_supported" => Ok(ClientErrorCode::RegistrationNotSupported), + "invalid_redirect_uri" => Ok(ClientErrorCode::InvalidRedirectUri), + "invalid_client_metadata" => Ok(ClientErrorCode::InvalidClientMetadata), + "authorization_pending" => Ok(ClientErrorCode::AuthorizationPending), + "slow_down" => Ok(ClientErrorCode::SlowDown), + "expired_token" => 
Ok(ClientErrorCode::ExpiredToken), + "unsupported_token_type" => Ok(ClientErrorCode::UnsupportedTokenType), + _ => Ok(ClientErrorCode::Unknown(s.to_owned())), + } + } +} + +impl ClientErrorCode { + /// Get the default description for this `ClientErrorCode`. + /// + /// Note that [`ClientErrorCode::Unknown`] returns an empty string. + #[must_use] + pub fn default_description(&self) -> &'static str { + match self { + ClientErrorCode::InvalidRequest => { + "The request is missing a required parameter, includes an \ + invalid parameter value, includes a parameter more than once, \ + or is otherwise malformed." + } + ClientErrorCode::InvalidClient => "Client authentication failed.", + ClientErrorCode::InvalidGrant => { + "The provided access grant is invalid, expired, or revoked." + } + ClientErrorCode::UnauthorizedClient => { + "The client is not authorized to request an access token using this method." + } + ClientErrorCode::UnsupportedGrantType => { + "The authorization grant type is not supported by the authorization server." + } + ClientErrorCode::AccessDenied => { + "The resource owner or authorization server denied the request." + } + ClientErrorCode::UnsupportedResponseType => { + "The authorization server does not support obtaining an access \ + token using this method." + } + ClientErrorCode::InvalidScope => { + "The requested scope is invalid, unknown, or malformed." + } + ClientErrorCode::ServerError => { + "The authorization server encountered an unexpected condition \ + that prevented it from fulfilling the request." + } + ClientErrorCode::TemporarilyUnavailable => { + "The authorization server is currently unable to handle the request \ + due to a temporary overloading or maintenance of the server." + } + ClientErrorCode::InteractionRequired => { + "The Authorization Server requires End-User interaction of some form to proceed." + } + ClientErrorCode::LoginRequired => { + "The Authorization Server requires End-User authentication." 
+ } + ClientErrorCode::AccountSelectionRequired => { + "The End-User is required to select a session at the Authorization Server." + } + ClientErrorCode::ConsentRequired => { + "The Authorization Server requires End-User consent." + } + ClientErrorCode::InvalidRequestUri => { + "The request_uri in the Authorization Request returns an error \ + or contains invalid data." + } + ClientErrorCode::InvalidRequestObject => { + "The request parameter contains an invalid Request Object." + } + ClientErrorCode::RequestNotSupported => { + "The provider does not support use of the request parameter." + } + ClientErrorCode::RequestUriNotSupported => { + "The provider does not support use of the request_uri parameter." + } + ClientErrorCode::RegistrationNotSupported => { + "The provider does not support use of the registration parameter." + } + ClientErrorCode::InvalidRedirectUri => { + "The value of one or more redirection URIs is invalid." + } + ClientErrorCode::InvalidClientMetadata => { + "The value of one of the client metadata fields is invalid" + } + ClientErrorCode::AuthorizationPending => "The authorization request is still pending", + ClientErrorCode::SlowDown => { + "The interval must be increased by 5 seconds for this and all subsequent requests" + } + ClientErrorCode::ExpiredToken => { + "The \"device_code\" has expired, and the device authorization session has concluded" + } + ClientErrorCode::UnsupportedTokenType => { + "The authorization server does not support the revocation of the presented token type." 
+ } + ClientErrorCode::Unknown(_) => "", + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn serialize_client_error_code() { + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidRequest).unwrap(), + "\"invalid_request\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidClient).unwrap(), + "\"invalid_client\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidGrant).unwrap(), + "\"invalid_grant\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::UnauthorizedClient).unwrap(), + "\"unauthorized_client\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::UnsupportedGrantType).unwrap(), + "\"unsupported_grant_type\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::AccessDenied).unwrap(), + "\"access_denied\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::UnsupportedResponseType).unwrap(), + "\"unsupported_response_type\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidScope).unwrap(), + "\"invalid_scope\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::ServerError).unwrap(), + "\"server_error\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::TemporarilyUnavailable).unwrap(), + "\"temporarily_unavailable\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InteractionRequired).unwrap(), + "\"interaction_required\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::LoginRequired).unwrap(), + "\"login_required\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::AccountSelectionRequired).unwrap(), + "\"account_selection_required\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::ConsentRequired).unwrap(), + "\"consent_required\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidRequestUri).unwrap(), + "\"invalid_request_uri\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidRequestObject).unwrap(), + 
"\"invalid_request_object\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::RequestNotSupported).unwrap(), + "\"request_not_supported\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::RequestUriNotSupported).unwrap(), + "\"request_uri_not_supported\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::RegistrationNotSupported).unwrap(), + "\"registration_not_supported\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidRedirectUri).unwrap(), + "\"invalid_redirect_uri\"" + ); + assert_eq!( + serde_json::to_string(&ClientErrorCode::InvalidClientMetadata).unwrap(), + "\"invalid_client_metadata\"" + ); + + assert_eq!( + serde_json::to_string(&ClientErrorCode::Unknown("unknown_error_code".to_owned())) + .unwrap(), + "\"unknown_error_code\"" + ); + } + + #[test] + fn deserialize_client_error_code() { + assert_eq!( + serde_json::from_str::("\"invalid_request\"").unwrap(), + ClientErrorCode::InvalidRequest + ); + assert_eq!( + serde_json::from_str::("\"invalid_client\"").unwrap(), + ClientErrorCode::InvalidClient + ); + assert_eq!( + serde_json::from_str::("\"invalid_grant\"").unwrap(), + ClientErrorCode::InvalidGrant + ); + assert_eq!( + serde_json::from_str::("\"unauthorized_client\"").unwrap(), + ClientErrorCode::UnauthorizedClient + ); + assert_eq!( + serde_json::from_str::("\"unsupported_grant_type\"").unwrap(), + ClientErrorCode::UnsupportedGrantType + ); + assert_eq!( + serde_json::from_str::("\"access_denied\"").unwrap(), + ClientErrorCode::AccessDenied + ); + assert_eq!( + serde_json::from_str::("\"unsupported_response_type\"").unwrap(), + ClientErrorCode::UnsupportedResponseType + ); + assert_eq!( + serde_json::from_str::("\"invalid_scope\"").unwrap(), + ClientErrorCode::InvalidScope + ); + assert_eq!( + serde_json::from_str::("\"server_error\"").unwrap(), + ClientErrorCode::ServerError + ); + assert_eq!( + serde_json::from_str::("\"temporarily_unavailable\"").unwrap(), + 
ClientErrorCode::TemporarilyUnavailable + ); + assert_eq!( + serde_json::from_str::("\"interaction_required\"").unwrap(), + ClientErrorCode::InteractionRequired + ); + assert_eq!( + serde_json::from_str::("\"login_required\"").unwrap(), + ClientErrorCode::LoginRequired + ); + assert_eq!( + serde_json::from_str::("\"account_selection_required\"").unwrap(), + ClientErrorCode::AccountSelectionRequired + ); + assert_eq!( + serde_json::from_str::("\"consent_required\"").unwrap(), + ClientErrorCode::ConsentRequired + ); + assert_eq!( + serde_json::from_str::("\"invalid_request_uri\"").unwrap(), + ClientErrorCode::InvalidRequestUri + ); + assert_eq!( + serde_json::from_str::("\"invalid_request_object\"").unwrap(), + ClientErrorCode::InvalidRequestObject + ); + assert_eq!( + serde_json::from_str::("\"request_not_supported\"").unwrap(), + ClientErrorCode::RequestNotSupported + ); + assert_eq!( + serde_json::from_str::("\"request_uri_not_supported\"").unwrap(), + ClientErrorCode::RequestUriNotSupported + ); + assert_eq!( + serde_json::from_str::("\"registration_not_supported\"").unwrap(), + ClientErrorCode::RegistrationNotSupported + ); + assert_eq!( + serde_json::from_str::("\"invalid_redirect_uri\"").unwrap(), + ClientErrorCode::InvalidRedirectUri + ); + assert_eq!( + serde_json::from_str::("\"invalid_client_metadata\"").unwrap(), + ClientErrorCode::InvalidClientMetadata + ); + + assert_eq!( + serde_json::from_str::("\"unknown_error_code\"").unwrap(), + ClientErrorCode::Unknown("unknown_error_code".to_owned()) + ); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/lib.rs b/matrix-authentication-service/crates/oauth2-types/src/lib.rs new file mode 100644 index 00000000..e937288f --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/lib.rs @@ -0,0 +1,33 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! [OAuth 2.0] and [OpenID Connect] types. +//! +//! This is part of the [Matrix Authentication Service] project. +//! +//! [OAuth 2.0]: https://oauth.net/2/ +//! [OpenID Connect]: https://openid.net/connect/ +//! [Matrix Authentication Service]: https://github.com/element-hq/matrix-authentication-service + +#![deny(missing_docs)] +#![allow(clippy::module_name_repetitions)] + +pub mod errors; +pub mod oidc; +pub mod pkce; +pub mod registration; +pub mod requests; +pub mod response_type; +pub mod scope; +pub mod webfinger; + +/// Traits intended for blanket imports. +pub mod prelude { + pub use crate::pkce::CodeChallengeMethodExt; +} + +#[cfg(test)] +mod test_utils; diff --git a/matrix-authentication-service/crates/oauth2-types/src/oidc.rs b/matrix-authentication-service/crates/oauth2-types/src/oidc.rs new file mode 100644 index 00000000..9ac67278 --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/oidc.rs @@ -0,0 +1,1819 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types to interact with the [OpenID Connect] specification. +//! +//! 
[OpenID Connect]: https://openid.net/connect/ + +use std::{fmt, ops::Deref}; + +use language_tags::LanguageTag; +use mas_iana::{ + jose::{JsonWebEncryptionAlg, JsonWebEncryptionEnc, JsonWebSignatureAlg}, + oauth::{OAuthAccessTokenType, OAuthClientAuthenticationMethod, PkceCodeChallengeMethod}, +}; +use serde::{Deserialize, Serialize}; +use serde_with::{ + DeserializeFromStr, SerializeDisplay, StringWithSeparator, formats::SpaceSeparator, serde_as, + skip_serializing_none, +}; +use thiserror::Error; +use url::Url; + +use crate::{ + requests::{Display, GrantType, Prompt, ResponseMode}, + response_type::ResponseType, +}; + +/// An enum for types that accept either an [`OAuthClientAuthenticationMethod`] +/// or an [`OAuthAccessTokenType`]. +#[derive(SerializeDisplay, DeserializeFromStr, Clone, PartialEq, Eq, Hash, Debug)] +pub enum AuthenticationMethodOrAccessTokenType { + /// An authentication method. + AuthenticationMethod(OAuthClientAuthenticationMethod), + + /// An access token type. + AccessTokenType(OAuthAccessTokenType), + + /// An unknown value. + /// + /// Note that this variant should only be used as the result parsing a + /// string of unknown type. To build a custom variant, first parse a + /// string with the wanted type then use `.into()`. 
+    Unknown(String),
+}
+
+impl core::fmt::Display for AuthenticationMethodOrAccessTokenType {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::AuthenticationMethod(m) => m.fmt(f),
+            Self::AccessTokenType(t) => t.fmt(f),
+            Self::Unknown(s) => s.fmt(f),
+        }
+    }
+}
+
+impl core::str::FromStr for AuthenticationMethodOrAccessTokenType {
+    type Err = core::convert::Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match OAuthClientAuthenticationMethod::from_str(s) {
+            Ok(OAuthClientAuthenticationMethod::Unknown(_)) | Err(_) => {}
+            Ok(m) => return Ok(m.into()),
+        }
+
+        match OAuthAccessTokenType::from_str(s) {
+            Ok(OAuthAccessTokenType::Unknown(_)) | Err(_) => {}
+            Ok(m) => return Ok(m.into()),
+        }
+
+        Ok(Self::Unknown(s.to_owned()))
+    }
+}
+
+impl AuthenticationMethodOrAccessTokenType {
+    /// Get the authentication method of this
+    /// `AuthenticationMethodOrAccessTokenType`.
+    #[must_use]
+    pub fn authentication_method(&self) -> Option<&OAuthClientAuthenticationMethod> {
+        match self {
+            Self::AuthenticationMethod(m) => Some(m),
+            _ => None,
+        }
+    }
+
+    /// Get the access token type of this
+    /// `AuthenticationMethodOrAccessTokenType`.
+    #[must_use]
+    pub fn access_token_type(&self) -> Option<&OAuthAccessTokenType> {
+        match self {
+            Self::AccessTokenType(t) => Some(t),
+            _ => None,
+        }
+    }
+}
+
+impl From<OAuthClientAuthenticationMethod> for AuthenticationMethodOrAccessTokenType {
+    fn from(t: OAuthClientAuthenticationMethod) -> Self {
+        Self::AuthenticationMethod(t)
+    }
+}
+
+impl From<OAuthAccessTokenType> for AuthenticationMethodOrAccessTokenType {
+    fn from(t: OAuthAccessTokenType) -> Self {
+        Self::AccessTokenType(t)
+    }
+}
+
+/// The kind of an application.
+#[derive(SerializeDisplay, DeserializeFromStr, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum ApplicationType {
+    /// A web application.
+    Web,
+
+    /// A native application.
+    Native,
+
+    /// An unknown value.
+ Unknown(String), +} + +impl core::fmt::Display for ApplicationType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Web => f.write_str("web"), + Self::Native => f.write_str("native"), + Self::Unknown(s) => f.write_str(s), + } + } +} + +impl core::str::FromStr for ApplicationType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "web" => Ok(Self::Web), + "native" => Ok(Self::Native), + s => Ok(Self::Unknown(s.to_owned())), + } + } +} + +/// Subject Identifier types. +/// +/// A Subject Identifier is a locally unique and never reassigned identifier +/// within the Issuer for the End-User, which is intended to be consumed by the +/// Client. +#[derive(SerializeDisplay, DeserializeFromStr, Clone, PartialEq, Eq, Hash, Debug)] +pub enum SubjectType { + /// This provides the same `sub` (subject) value to all Clients. + Public, + + /// This provides a different `sub` value to each Client, so as not to + /// enable Clients to correlate the End-User's activities without + /// permission. + Pairwise, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for SubjectType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Public => f.write_str("public"), + Self::Pairwise => f.write_str("pairwise"), + Self::Unknown(s) => f.write_str(s), + } + } +} + +impl core::str::FromStr for SubjectType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "public" => Ok(Self::Public), + "pairwise" => Ok(Self::Pairwise), + s => Ok(Self::Unknown(s.to_owned())), + } + } +} + +/// Claim types. +#[derive(SerializeDisplay, DeserializeFromStr, Clone, PartialEq, Eq, Hash, Debug)] +pub enum ClaimType { + /// Claims that are directly asserted by the OpenID Provider. + Normal, + + /// Claims that are asserted by a Claims Provider other than the OpenID + /// Provider but are returned by OpenID Provider. 
+ Aggregated, + + /// Claims that are asserted by a Claims Provider other than the OpenID + /// Provider but are returned as references by the OpenID Provider. + Distributed, + + /// An unknown value. + Unknown(String), +} + +impl core::fmt::Display for ClaimType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Normal => f.write_str("normal"), + Self::Aggregated => f.write_str("aggregated"), + Self::Distributed => f.write_str("distributed"), + Self::Unknown(s) => f.write_str(s), + } + } +} + +impl core::str::FromStr for ClaimType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "normal" => Ok(Self::Normal), + "aggregated" => Ok(Self::Aggregated), + "distributed" => Ok(Self::Distributed), + s => Ok(Self::Unknown(s.to_owned())), + } + } +} + +/// An account management action that a user can take. +/// +/// Source: +#[derive( + SerializeDisplay, DeserializeFromStr, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, +)] +#[non_exhaustive] +pub enum AccountManagementAction { + /// `org.matrix.profile` + /// + /// The user wishes to view their profile (name, avatar, contact details). + Profile, + + /// `org.matrix.sessions_list` + /// + /// The user wishes to view a list of their sessions. + SessionsList, + + /// `org.matrix.session_view` + /// + /// The user wishes to view the details of a specific session. + SessionView, + + /// `org.matrix.session_end` + /// + /// The user wishes to end/log out of a specific session. + SessionEnd, + + /// `org.matrix.account_deactivate` + /// + /// The user wishes to deactivate their account. + AccountDeactivate, + + /// `org.matrix.cross_signing_reset` + /// + /// The user wishes to reset their cross-signing keys. + CrossSigningReset, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for AccountManagementAction { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Profile => write!(f, "org.matrix.profile"), + Self::SessionsList => write!(f, "org.matrix.sessions_list"), + Self::SessionView => write!(f, "org.matrix.session_view"), + Self::SessionEnd => write!(f, "org.matrix.session_end"), + Self::AccountDeactivate => write!(f, "org.matrix.account_deactivate"), + Self::CrossSigningReset => write!(f, "org.matrix.cross_signing_reset"), + Self::Unknown(value) => write!(f, "{value}"), + } + } +} + +impl core::str::FromStr for AccountManagementAction { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "org.matrix.profile" => Ok(Self::Profile), + "org.matrix.sessions_list" => Ok(Self::SessionsList), + "org.matrix.session_view" => Ok(Self::SessionView), + "org.matrix.session_end" => Ok(Self::SessionEnd), + "org.matrix.account_deactivate" => Ok(Self::AccountDeactivate), + "org.matrix.cross_signing_reset" => Ok(Self::CrossSigningReset), + value => Ok(Self::Unknown(value.to_owned())), + } + } +} + +/// The default value of `response_modes_supported` if it is not set. +pub static DEFAULT_RESPONSE_MODES_SUPPORTED: &[ResponseMode] = + &[ResponseMode::Query, ResponseMode::Fragment]; + +/// The default value of `grant_types_supported` if it is not set. +pub static DEFAULT_GRANT_TYPES_SUPPORTED: &[GrantType] = + &[GrantType::AuthorizationCode, GrantType::Implicit]; + +/// The default value of `token_endpoint_auth_methods_supported` if it is not +/// set. +pub static DEFAULT_AUTH_METHODS_SUPPORTED: &[OAuthClientAuthenticationMethod] = + &[OAuthClientAuthenticationMethod::ClientSecretBasic]; + +/// The default value of `claim_types_supported` if it is not set. +pub static DEFAULT_CLAIM_TYPES_SUPPORTED: &[ClaimType] = &[ClaimType::Normal]; + +/// Authorization server metadata, as described by the [IANA registry]. 
+/// +/// All the fields with a default value are accessible via methods. +/// +/// [IANA registry]: https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#authorization-server-metadata +#[skip_serializing_none] +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +pub struct ProviderMetadata { + /// Authorization server's issuer identifier URL. + /// + /// This field is required. The URL must use a `https` scheme, and must not + /// contain a query or fragment. It must match the one used to build the + /// well-known URI to query this metadata. + pub issuer: Option, + + /// URL of the authorization server's [authorization endpoint]. + /// + /// This field is required. The URL must use a `https` scheme, and must not + /// contain a fragment. + /// + /// [authorization endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1 + pub authorization_endpoint: Option, + + /// URL of the authorization server's [token endpoint]. + /// + /// This field is required. The URL must use a `https` scheme, and must not + /// contain a fragment. + /// + /// [token endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.2 + pub token_endpoint: Option, + + /// URL of the authorization server's [JWK] Set document. + /// + /// This field is required. The URL must use a `https` scheme. + /// + /// [JWK]: https://www.rfc-editor.org/rfc/rfc7517.html + pub jwks_uri: Option, + + /// URL of the authorization server's [OAuth 2.0 Dynamic Client + /// Registration] endpoint. + /// + /// If this field is present, the URL must use a `https` scheme. + /// + /// [OAuth 2.0 Dynamic Client Registration]: https://www.rfc-editor.org/rfc/rfc7591 + pub registration_endpoint: Option, + + /// JSON array containing a list of the OAuth 2.0 `scope` values that this + /// authorization server supports. + /// + /// If this field is present, it must contain at least the `openid` scope + /// value. 
+ pub scopes_supported: Option>, + + /// JSON array containing a list of the [OAuth 2.0 `response_type` values] + /// that this authorization server supports. + /// + /// This field is required. + /// + /// [OAuth 2.0 `response_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + pub response_types_supported: Option>, + + /// JSON array containing a list of the [OAuth 2.0 `response_mode` values] + /// that this authorization server supports. + /// + /// Defaults to [`DEFAULT_RESPONSE_MODES_SUPPORTED`]. + /// + /// [OAuth 2.0 `response_mode` values]: https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html + pub response_modes_supported: Option>, + + /// JSON array containing a list of the [OAuth 2.0 `grant_type` values] that + /// this authorization server supports. + /// + /// Defaults to [`DEFAULT_GRANT_TYPES_SUPPORTED`]. + /// + /// [OAuth 2.0 `grant_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + pub grant_types_supported: Option>, + + /// JSON array containing a list of client authentication methods supported + /// by this token endpoint. + /// + /// Defaults to [`DEFAULT_AUTH_METHODS_SUPPORTED`]. + pub token_endpoint_auth_methods_supported: Option>, + + /// JSON array containing a list of the JWS signing algorithms supported + /// by the token endpoint for the signature on the JWT used to + /// authenticate the client at the token endpoint. + /// + /// If this field is present, it must not contain + /// [`JsonWebSignatureAlg::None`]. This field is required if + /// `token_endpoint_auth_methods_supported` contains + /// [`OAuthClientAuthenticationMethod::PrivateKeyJwt`] or + /// [`OAuthClientAuthenticationMethod::ClientSecretJwt`]. + pub token_endpoint_auth_signing_alg_values_supported: Option>, + + /// URL of a page containing human-readable information that developers + /// might want or need to know when using the authorization server. 
+ pub service_documentation: Option, + + /// Languages and scripts supported for the user interface, represented as a + /// JSON array of language tag values from BCP 47. + /// + /// If omitted, the set of supported languages and scripts is unspecified. + pub ui_locales_supported: Option>, + + /// URL that the authorization server provides to the person registering the + /// client to read about the authorization server's requirements on how the + /// client can use the data provided by the authorization server. + pub op_policy_uri: Option, + + /// URL that the authorization server provides to the person registering the + /// client to read about the authorization server's terms of service. + pub op_tos_uri: Option, + + /// URL of the authorization server's [OAuth 2.0 revocation endpoint]. + /// + /// If this field is present, the URL must use a `https` scheme, and must + /// not contain a fragment. + /// + /// [OAuth 2.0 revocation endpoint]: https://www.rfc-editor.org/rfc/rfc7009 + pub revocation_endpoint: Option, + + /// JSON array containing a list of client authentication methods supported + /// by this revocation endpoint. + /// + /// Defaults to [`DEFAULT_AUTH_METHODS_SUPPORTED`]. + pub revocation_endpoint_auth_methods_supported: Option>, + + /// JSON array containing a list of the JWS signing algorithms supported by + /// the revocation endpoint for the signature on the JWT used to + /// authenticate the client at the revocation endpoint. + /// + /// If this field is present, it must not contain + /// [`JsonWebSignatureAlg::None`]. This field is required if + /// `revocation_endpoint_auth_methods_supported` contains + /// [`OAuthClientAuthenticationMethod::PrivateKeyJwt`] or + /// [`OAuthClientAuthenticationMethod::ClientSecretJwt`]. + pub revocation_endpoint_auth_signing_alg_values_supported: Option>, + + /// URL of the authorization server's [OAuth 2.0 introspection endpoint]. + /// + /// If this field is present, the URL must use a `https` scheme. 
+ /// + /// [OAuth 2.0 introspection endpoint]: https://www.rfc-editor.org/rfc/rfc7662 + pub introspection_endpoint: Option, + + /// JSON array containing a list of client authentication methods or token + /// types supported by this introspection endpoint. + pub introspection_endpoint_auth_methods_supported: + Option>, + + /// JSON array containing a list of the JWS signing algorithms supported by + /// the introspection endpoint for the signature on the JWT used to + /// authenticate the client at the introspection endpoint. + /// + /// If this field is present, it must not contain + /// [`JsonWebSignatureAlg::None`]. This field is required if + /// `intospection_endpoint_auth_methods_supported` contains + /// [`OAuthClientAuthenticationMethod::PrivateKeyJwt`] or + /// [`OAuthClientAuthenticationMethod::ClientSecretJwt`]. + pub introspection_endpoint_auth_signing_alg_values_supported: Option>, + + /// [PKCE code challenge methods] supported by this authorization server. + /// If omitted, the authorization server does not support PKCE. + /// + /// [PKCE code challenge]: https://www.rfc-editor.org/rfc/rfc7636 + pub code_challenge_methods_supported: Option>, + + /// URL of the OP's [UserInfo Endpoint]. + /// + /// [UserInfo Endpoint]: https://openid.net/specs/openid-connect-core-1_0.html#UserInfo + pub userinfo_endpoint: Option, + + /// JSON array containing a list of the Authentication Context Class + /// References that this OP supports. + pub acr_values_supported: Option>, + + /// JSON array containing a list of the Subject Identifier types that this + /// OP supports. + /// + /// This field is required. + pub subject_types_supported: Option>, + + /// JSON array containing a list of the JWS signing algorithms (`alg` + /// values) supported by the OP for the ID Token. + /// + /// This field is required. 
+ pub id_token_signing_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`alg` + /// values) supported by the OP for the ID Token. + pub id_token_encryption_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`enc` + /// values) supported by the OP for the ID Token. + pub id_token_encryption_enc_values_supported: Option>, + + /// JSON array containing a list of the JWS signing algorithms (`alg` + /// values) supported by the UserInfo Endpoint. + pub userinfo_signing_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`alg` + /// values) supported by the UserInfo Endpoint. + pub userinfo_encryption_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`enc` + /// values) supported by the UserInfo Endpoint. + pub userinfo_encryption_enc_values_supported: Option>, + + /// JSON array containing a list of the JWS signing algorithms (`alg` + /// values) supported by the OP for Request Objects. + pub request_object_signing_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`alg` + /// values) supported by the OP for Request Objects. + pub request_object_encryption_alg_values_supported: Option>, + + /// JSON array containing a list of the JWE encryption algorithms (`enc` + /// values) supported by the OP for Request Objects. + pub request_object_encryption_enc_values_supported: Option>, + + /// JSON array containing a list of the "display" parameter values that the + /// OpenID Provider supports. + pub display_values_supported: Option>, + + /// JSON array containing a list of the Claim Types that the OpenID Provider + /// supports. + /// + /// Defaults to [`DEFAULT_CLAIM_TYPES_SUPPORTED`]. 
+ pub claim_types_supported: Option>, + + /// JSON array containing a list of the Claim Names of the Claims that the + /// OpenID Provider MAY be able to supply values for. + pub claims_supported: Option>, + + /// Languages and scripts supported for values in Claims being returned, + /// represented as a JSON array of BCP 47 language tag values. + pub claims_locales_supported: Option>, + + /// Boolean value specifying whether the OP supports use of the `claims` + /// parameter. + /// + /// Defaults to `false`. + pub claims_parameter_supported: Option, + + /// Boolean value specifying whether the OP supports use of the `request` + /// parameter. + /// + /// Defaults to `false`. + pub request_parameter_supported: Option, + + /// Boolean value specifying whether the OP supports use of the + /// `request_uri` parameter. + /// + /// Defaults to `true`. + pub request_uri_parameter_supported: Option, + + /// Boolean value specifying whether the OP requires any `request_uri` + /// values used to be pre-registered. + /// + /// Defaults to `false`. + pub require_request_uri_registration: Option, + + /// Indicates where authorization request needs to be protected as [Request + /// Object] and provided through either request or `request_uri` parameter. + /// + /// Defaults to `false`. + /// + /// [Request Object]: https://www.rfc-editor.org/rfc/rfc9101.html + pub require_signed_request_object: Option, + + /// URL of the authorization server's [pushed authorization request + /// endpoint]. + /// + /// [pushed authorization request endpoint]: https://www.rfc-editor.org/rfc/rfc9126.html + pub pushed_authorization_request_endpoint: Option, + + /// Indicates whether the authorization server accepts authorization + /// requests only via PAR. + /// + /// Defaults to `false`. + pub require_pushed_authorization_requests: Option, + + /// Array containing the list of prompt values that this OP supports. 
+ /// + /// This field can be used to detect if the OP supports the [prompt + /// `create`] value. + /// + /// [prompt `create`]: https://openid.net/specs/openid-connect-prompt-create-1_0.html + pub prompt_values_supported: Option>, + + /// URL of the authorization server's [device authorization endpoint]. + /// + /// [device authorization endpoint]: https://www.rfc-editor.org/rfc/rfc8628 + pub device_authorization_endpoint: Option, + + /// URL of the authorization server's [RP-Initiated Logout endpoint]. + /// + /// [RP-Initiated Logout endpoint]: https://openid.net/specs/openid-connect-rpinitiated-1_0.html + pub end_session_endpoint: Option, + + /// URL where the user is able to access the account management capabilities + /// of this OP. + /// + /// This is a Matrix extension introduced in [MSC2965](https://github.com/matrix-org/matrix-spec-proposals/pull/2965). + pub account_management_uri: Option, + + /// Array of actions that the account management URL supports. + /// + /// This is a Matrix extension introduced in [MSC2965](https://github.com/matrix-org/matrix-spec-proposals/pull/2965). + pub account_management_actions_supported: Option>, +} + +impl ProviderMetadata { + /// Validate this `ProviderMetadata` according to the [OpenID Connect + /// Discovery Spec 1.0]. + /// + /// # Parameters + /// + /// - `issuer`: The issuer that was discovered to get this + /// `ProviderMetadata`. + /// + /// # Errors + /// + /// Will return `Err` if validation fails. 
+ /// + /// [OpenID Connect Discovery Spec 1.0]: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata + pub fn validate( + self, + issuer: &str, + ) -> Result { + let metadata = self.insecure_verify_metadata()?; + + if metadata.issuer() != issuer { + return Err(ProviderMetadataVerificationError::IssuerUrlsDontMatch { + expected: issuer.to_owned(), + actual: metadata.issuer().to_owned(), + }); + } + + validate_url( + "issuer", + &metadata + .issuer() + .parse() + .map_err(|_| ProviderMetadataVerificationError::IssuerNotUrl)?, + ExtraUrlRestrictions::NoQueryOrFragment, + )?; + + validate_url( + "authorization_endpoint", + metadata.authorization_endpoint(), + ExtraUrlRestrictions::NoFragment, + )?; + + validate_url( + "token_endpoint", + metadata.token_endpoint(), + ExtraUrlRestrictions::NoFragment, + )?; + + validate_url("jwks_uri", metadata.jwks_uri(), ExtraUrlRestrictions::None)?; + + if let Some(url) = &metadata.registration_endpoint { + validate_url("registration_endpoint", url, ExtraUrlRestrictions::None)?; + } + + if let Some(scopes) = &metadata.scopes_supported + && !scopes.iter().any(|s| s == "openid") + { + return Err(ProviderMetadataVerificationError::ScopesMissingOpenid); + } + + validate_signing_alg_values_supported( + "token_endpoint", + metadata + .token_endpoint_auth_signing_alg_values_supported + .iter() + .flatten(), + )?; + + if let Some(url) = &metadata.revocation_endpoint { + validate_url("revocation_endpoint", url, ExtraUrlRestrictions::NoFragment)?; + } + + validate_signing_alg_values_supported( + "revocation_endpoint", + metadata + .revocation_endpoint_auth_signing_alg_values_supported + .iter() + .flatten(), + )?; + + if let Some(url) = &metadata.introspection_endpoint { + validate_url("introspection_endpoint", url, ExtraUrlRestrictions::None)?; + } + + validate_signing_alg_values_supported( + "introspection_endpoint", + metadata + .introspection_endpoint_auth_signing_alg_values_supported + .iter() + .flatten(), + )?; + 
+ if let Some(url) = &metadata.userinfo_endpoint { + validate_url("userinfo_endpoint", url, ExtraUrlRestrictions::None)?; + } + + if let Some(url) = &metadata.pushed_authorization_request_endpoint { + validate_url( + "pushed_authorization_request_endpoint", + url, + ExtraUrlRestrictions::None, + )?; + } + + if let Some(url) = &metadata.end_session_endpoint { + validate_url("end_session_endpoint", url, ExtraUrlRestrictions::None)?; + } + + Ok(metadata) + } + + /// Verify this `ProviderMetadata`. + /// + /// Contrary to [`ProviderMetadata::validate()`], it only checks that the + /// required fields are present. + /// + /// This can be used during development to test against a local OpenID + /// Provider, for example. + /// + /// # Parameters + /// + /// - `issuer`: The issuer that was discovered to get this + /// `ProviderMetadata`. + /// + /// # Errors + /// + /// Will return `Err` if a required field is missing. + /// + /// # Warning + /// + /// It is not recommended to use this method in production as it doesn't + /// ensure that the issuer implements the proper security practices. 
+ pub fn insecure_verify_metadata( + self, + ) -> Result { + self.issuer + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingIssuer)?; + + self.authorization_endpoint + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingAuthorizationEndpoint)?; + + self.token_endpoint + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingTokenEndpoint)?; + + self.jwks_uri + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingJwksUri)?; + + self.response_types_supported + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingResponseTypesSupported)?; + + self.subject_types_supported + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingSubjectTypesSupported)?; + + self.id_token_signing_alg_values_supported + .as_ref() + .ok_or(ProviderMetadataVerificationError::MissingIdTokenSigningAlgValuesSupported)?; + + Ok(VerifiedProviderMetadata { inner: self }) + } + + /// JSON array containing a list of the OAuth 2.0 `response_mode` values + /// that this authorization server supports. + /// + /// Defaults to [`DEFAULT_RESPONSE_MODES_SUPPORTED`]. + #[must_use] + pub fn response_modes_supported(&self) -> &[ResponseMode] { + self.response_modes_supported + .as_deref() + .unwrap_or(DEFAULT_RESPONSE_MODES_SUPPORTED) + } + + /// JSON array containing a list of the OAuth 2.0 grant type values that + /// this authorization server supports. + /// + /// Defaults to [`DEFAULT_GRANT_TYPES_SUPPORTED`]. + #[must_use] + pub fn grant_types_supported(&self) -> &[GrantType] { + self.grant_types_supported + .as_deref() + .unwrap_or(DEFAULT_GRANT_TYPES_SUPPORTED) + } + + /// JSON array containing a list of client authentication methods supported + /// by the token endpoint. + /// + /// Defaults to [`DEFAULT_AUTH_METHODS_SUPPORTED`]. 
+ #[must_use] + pub fn token_endpoint_auth_methods_supported(&self) -> &[OAuthClientAuthenticationMethod] { + self.token_endpoint_auth_methods_supported + .as_deref() + .unwrap_or(DEFAULT_AUTH_METHODS_SUPPORTED) + } + + /// JSON array containing a list of client authentication methods supported + /// by the revocation endpoint. + /// + /// Defaults to [`DEFAULT_AUTH_METHODS_SUPPORTED`]. + #[must_use] + pub fn revocation_endpoint_auth_methods_supported(&self) -> &[OAuthClientAuthenticationMethod] { + self.revocation_endpoint_auth_methods_supported + .as_deref() + .unwrap_or(DEFAULT_AUTH_METHODS_SUPPORTED) + } + + /// JSON array containing a list of the Claim Types that the OpenID Provider + /// supports. + /// + /// Defaults to [`DEFAULT_CLAIM_TYPES_SUPPORTED`]. + #[must_use] + pub fn claim_types_supported(&self) -> &[ClaimType] { + self.claim_types_supported + .as_deref() + .unwrap_or(DEFAULT_CLAIM_TYPES_SUPPORTED) + } + + /// Boolean value specifying whether the OP supports use of the `claims` + /// parameter. + /// + /// Defaults to `false`. + #[must_use] + pub fn claims_parameter_supported(&self) -> bool { + self.claims_parameter_supported.unwrap_or(false) + } + + /// Boolean value specifying whether the OP supports use of the `request` + /// parameter. + /// + /// Defaults to `false`. + #[must_use] + pub fn request_parameter_supported(&self) -> bool { + self.request_parameter_supported.unwrap_or(false) + } + + /// Boolean value specifying whether the OP supports use of the + /// `request_uri` parameter. + /// + /// Defaults to `true`. + #[must_use] + pub fn request_uri_parameter_supported(&self) -> bool { + self.request_uri_parameter_supported.unwrap_or(true) + } + + /// Boolean value specifying whether the OP requires any `request_uri` + /// values used to be pre-registered. + /// + /// Defaults to `false`. 
+ #[must_use] + pub fn require_request_uri_registration(&self) -> bool { + self.require_request_uri_registration.unwrap_or(false) + } + + /// Indicates where authorization request needs to be protected as Request + /// Object and provided through either `request` or `request_uri` parameter. + /// + /// Defaults to `false`. + #[must_use] + pub fn require_signed_request_object(&self) -> bool { + self.require_signed_request_object.unwrap_or(false) + } + + /// Indicates whether the authorization server accepts authorization + /// requests only via PAR. + /// + /// Defaults to `false`. + #[must_use] + pub fn require_pushed_authorization_requests(&self) -> bool { + self.require_pushed_authorization_requests.unwrap_or(false) + } +} + +/// The verified authorization server metadata. +/// +/// All the fields required by the [OpenID Connect Discovery Spec 1.0] or with +/// a default value are accessible via methods. +/// +/// To access other fields, use this type's `Deref` implementation. +/// +/// [OpenID Connect Discovery Spec 1.0]: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata +#[derive(Debug, Clone)] +pub struct VerifiedProviderMetadata { + inner: ProviderMetadata, +} + +impl VerifiedProviderMetadata { + /// Authorization server's issuer identifier URL. + #[must_use] + pub fn issuer(&self) -> &str { + match &self.issuer { + Some(u) => u, + None => unreachable!(), + } + } + + /// URL of the authorization server's authorization endpoint. + #[must_use] + pub fn authorization_endpoint(&self) -> &Url { + match &self.authorization_endpoint { + Some(u) => u, + None => unreachable!(), + } + } + + /// URL of the authorization server's userinfo endpoint. + #[must_use] + pub fn userinfo_endpoint(&self) -> &Url { + match &self.userinfo_endpoint { + Some(u) => u, + None => unreachable!(), + } + } + + /// URL of the authorization server's token endpoint. 
+ #[must_use] + pub fn token_endpoint(&self) -> &Url { + match &self.token_endpoint { + Some(u) => u, + None => unreachable!(), + } + } + + /// URL of the authorization server's JWK Set document. + #[must_use] + pub fn jwks_uri(&self) -> &Url { + match &self.jwks_uri { + Some(u) => u, + None => unreachable!(), + } + } + + /// JSON array containing a list of the OAuth 2.0 `response_type` values + /// that this authorization server supports. + #[must_use] + pub fn response_types_supported(&self) -> &[ResponseType] { + match &self.response_types_supported { + Some(u) => u, + None => unreachable!(), + } + } + + /// JSON array containing a list of the Subject Identifier types that this + /// OP supports. + #[must_use] + pub fn subject_types_supported(&self) -> &[SubjectType] { + match &self.subject_types_supported { + Some(u) => u, + None => unreachable!(), + } + } + + /// JSON array containing a list of the JWS `alg` values supported by the OP + /// for the ID Token. + #[must_use] + pub fn id_token_signing_alg_values_supported(&self) -> &[JsonWebSignatureAlg] { + match &self.id_token_signing_alg_values_supported { + Some(u) => u, + None => unreachable!(), + } + } +} + +impl Deref for VerifiedProviderMetadata { + type Target = ProviderMetadata; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +/// All errors that can happen when verifying [`ProviderMetadata`] +#[derive(Debug, Error)] +pub enum ProviderMetadataVerificationError { + /// The issuer is missing. + #[error("issuer is missing")] + MissingIssuer, + + /// The issuer is not a valid URL. + #[error("issuer is not a valid URL")] + IssuerNotUrl, + + /// The authorization endpoint is missing. + #[error("authorization endpoint is missing")] + MissingAuthorizationEndpoint, + + /// The token endpoint is missing. + #[error("token endpoint is missing")] + MissingTokenEndpoint, + + /// The JWK Set URI is missing. 
+ #[error("JWK Set URI is missing")] + MissingJwksUri, + + /// The supported response types are missing. + #[error("supported response types are missing")] + MissingResponseTypesSupported, + + /// The supported subject types are missing. + #[error("supported subject types are missing")] + MissingSubjectTypesSupported, + + /// The supported ID token signing algorithm values are missing. + #[error("supported ID token signing algorithm values are missing")] + MissingIdTokenSigningAlgValuesSupported, + + /// The URL of the given field doesn't use a `https` scheme. + #[error("{0}'s URL doesn't use a https scheme: {1}")] + UrlNonHttpsScheme(&'static str, Url), + + /// The URL of the given field contains a query, but it's not allowed. + #[error("{0}'s URL contains a query: {1}")] + UrlWithQuery(&'static str, Url), + + /// The URL of the given field contains a fragment, but it's not allowed. + #[error("{0}'s URL contains a fragment: {1}")] + UrlWithFragment(&'static str, Url), + + /// The issuer URL doesn't match the one that was discovered. + #[error("issuer URLs don't match: expected {expected:?}, got {actual:?}")] + IssuerUrlsDontMatch { + /// The expected issuer URL. + expected: String, + /// The issuer URL that was discovered. + actual: String, + }, + + /// `openid` is missing from the supported scopes. + #[error("missing openid scope")] + ScopesMissingOpenid, + + /// `code` is missing from the supported response types. + #[error("missing `code` response type")] + ResponseTypesMissingCode, + + /// `id_token` is missing from the supported response types. + #[error("missing `id_token` response type")] + ResponseTypesMissingIdToken, + + /// `id_token token` is missing from the supported response types. + #[error("missing `id_token token` response type")] + ResponseTypesMissingIdTokenToken, + + /// `authorization_code` is missing from the supported grant types. 
+ #[error("missing `authorization_code` grant type")] + GrantTypesMissingAuthorizationCode, + + /// `implicit` is missing from the supported grant types. + #[error("missing `implicit` grant type")] + GrantTypesMissingImplicit, + + /// `none` is in the given endpoint's signing algorithm values, but is not + /// allowed. + #[error("{0} signing algorithm values contain `none`")] + SigningAlgValuesWithNone(&'static str), +} + +/// Possible extra restrictions on a URL. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum ExtraUrlRestrictions { + /// No extra restrictions. + None, + + /// The URL must not contain a fragment. + NoFragment, + + /// The URL must not contain a query or a fragment. + NoQueryOrFragment, +} + +impl ExtraUrlRestrictions { + fn can_have_fragment(self) -> bool { + self == Self::None + } + + fn can_have_query(self) -> bool { + self != Self::NoQueryOrFragment + } +} + +/// Validate the URL of the field with the given extra restrictions. +/// +/// The basic restriction is that the URL must use the `https` scheme. +fn validate_url( + field: &'static str, + url: &Url, + restrictions: ExtraUrlRestrictions, +) -> Result<(), ProviderMetadataVerificationError> { + if url.scheme() != "https" { + return Err(ProviderMetadataVerificationError::UrlNonHttpsScheme( + field, + url.clone(), + )); + } + + if !restrictions.can_have_query() && url.query().is_some() { + return Err(ProviderMetadataVerificationError::UrlWithQuery( + field, + url.clone(), + )); + } + + if !restrictions.can_have_fragment() && url.fragment().is_some() { + return Err(ProviderMetadataVerificationError::UrlWithFragment( + field, + url.clone(), + )); + } + + Ok(()) +} + +/// Validate the algorithm values of the endpoint according to the +/// authentication methods. +/// +/// The restrictions are: +/// - The algorithm values must not contain `none`, +/// - If the `client_secret_jwt` or `private_key_jwt` authentication methods are +/// supported, the values must be present. 
+fn validate_signing_alg_values_supported<'a>( + endpoint: &'static str, + values: impl Iterator, +) -> Result<(), ProviderMetadataVerificationError> { + for value in values { + if *value == JsonWebSignatureAlg::None { + return Err(ProviderMetadataVerificationError::SigningAlgValuesWithNone( + endpoint, + )); + } + } + Ok(()) +} + +/// The body of a request to the [RP-Initiated Logout Endpoint]. +/// +/// [RP-Initiated Logout Endpoint]: https://openid.net/specs/openid-connect-rpinitiated-1_0.html +#[skip_serializing_none] +#[serde_as] +#[derive(Default, Serialize, Deserialize, Clone)] +pub struct RpInitiatedLogoutRequest { + /// ID Token previously issued by the OP to the RP. + /// + /// Recommended, used as a hint about the End-User's current authenticated + /// session with the Client. + pub id_token_hint: Option, + + /// Hint to the Authorization Server about the End-User that is logging out. + /// + /// The value and meaning of this parameter is left up to the OP's + /// discretion. For instance, the value might contain an email address, + /// phone number, username, or session identifier pertaining to the RP's + /// session with the OP for the End-User. + pub logout_hint: Option, + + /// OAuth 2.0 Client Identifier valid at the Authorization Server. + /// + /// The most common use case for this parameter is to specify the Client + /// Identifier when `post_logout_redirect_uri` is used but `id_token_hint` + /// is not. Another use is for symmetrically encrypted ID Tokens used as + /// `id_token_hint` values that require the Client Identifier to be + /// specified by other means, so that the ID Tokens can be decrypted by + /// the OP. + pub client_id: Option, + + /// URI to which the RP is requesting that the End-User's User Agent be + /// redirected after a logout has been performed. + /// + /// The value MUST have been previously registered with the OP, using the + /// `post_logout_redirect_uris` registration parameter. 
+ pub post_logout_redirect_uri: Option, + + /// Opaque value used by the RP to maintain state between the logout request + /// and the callback to the endpoint specified by the + /// `post_logout_redirect_uri` parameter. + pub state: Option, + + /// End-User's preferred languages and scripts for the user interface, + /// ordered by preference. + #[serde_as(as = "Option>")] + #[serde(default)] + pub ui_locales: Option>, +} + +impl fmt::Debug for RpInitiatedLogoutRequest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RpInitiatedLogoutRequest") + .field("logout_hint", &self.logout_hint) + .field("post_logout_redirect_uri", &self.post_logout_redirect_uri) + .field("ui_locales", &self.ui_locales) + .finish_non_exhaustive() + } +} + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + use mas_iana::{ + jose::JsonWebSignatureAlg, + oauth::{OAuthAuthorizationEndpointResponseType, OAuthClientAuthenticationMethod}, + }; + use url::Url; + + use super::*; + + fn valid_provider_metadata() -> (ProviderMetadata, String) { + let issuer = "https://localhost".to_owned(); + let metadata = ProviderMetadata { + issuer: Some(issuer.clone()), + authorization_endpoint: Some(Url::parse("https://localhost/auth").unwrap()), + token_endpoint: Some(Url::parse("https://localhost/token").unwrap()), + jwks_uri: Some(Url::parse("https://localhost/jwks").unwrap()), + response_types_supported: Some(vec![ + OAuthAuthorizationEndpointResponseType::Code.into(), + ]), + subject_types_supported: Some(vec![SubjectType::Public]), + id_token_signing_alg_values_supported: Some(vec![JsonWebSignatureAlg::Rs256]), + ..Default::default() + }; + + (metadata, issuer) + } + + #[test] + fn validate_required_metadata() { + let (metadata, issuer) = valid_provider_metadata(); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_issuer() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.issuer = None; + 
assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingIssuer) + ); + + // Err - Not an url + metadata.issuer = Some("not-an-url".to_owned()); + assert_matches!( + metadata.clone().validate("not-an-url"), + Err(ProviderMetadataVerificationError::IssuerNotUrl) + ); + + // Err - Wrong issuer + metadata.issuer = Some("https://example.com/".to_owned()); + assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::IssuerUrlsDontMatch { .. }) + ); + + // Err - Not https + let issuer = "http://localhost/".to_owned(); + metadata.issuer = Some(issuer.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "issuer"); + assert_eq!(url.as_str(), issuer); + + // Err - Query + let issuer = "https://localhost/?query".to_owned(); + metadata.issuer = Some(issuer.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlWithQuery(field, url)) => (field, url) + ); + assert_eq!(field, "issuer"); + assert_eq!(url.as_str(), issuer); + + // Err - Fragment + let issuer = "https://localhost/#fragment".to_owned(); + metadata.issuer = Some(issuer.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlWithFragment(field, url)) => (field, url) + ); + assert_eq!(field, "issuer"); + assert_eq!(url.as_str(), issuer); + + // Ok - Path + let issuer = "https://localhost/issuer1".to_owned(); + metadata.issuer = Some(issuer.clone()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_authorization_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.authorization_endpoint = None; + assert_matches!( + metadata.clone().validate(&issuer), + 
Err(ProviderMetadataVerificationError::MissingAuthorizationEndpoint) + ); + + // Err - Not https + let endpoint = Url::parse("http://localhost/auth").unwrap(); + metadata.authorization_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "authorization_endpoint"); + assert_eq!(url, endpoint); + + // Err - Fragment + let endpoint = Url::parse("https://localhost/auth#fragment").unwrap(); + metadata.authorization_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlWithFragment(field, url)) => (field, url) + ); + assert_eq!(field, "authorization_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query + metadata.authorization_endpoint = Some(Url::parse("https://localhost/auth?query").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_token_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.token_endpoint = None; + assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingTokenEndpoint) + ); + + // Err - Not https + let endpoint = Url::parse("http://localhost/token").unwrap(); + metadata.token_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "token_endpoint"); + assert_eq!(url, endpoint); + + // Err - Fragment + let endpoint = Url::parse("https://localhost/token#fragment").unwrap(); + metadata.token_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlWithFragment(field, url)) => (field, url) + ); + 
assert_eq!(field, "token_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query + metadata.token_endpoint = Some(Url::parse("https://localhost/token?query").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_jwks_uri() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.jwks_uri = None; + assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingJwksUri) + ); + + // Err - Not https + let endpoint = Url::parse("http://localhost/jwks").unwrap(); + metadata.jwks_uri = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "jwks_uri"); + assert_eq!(url, endpoint); + + // Ok - Query & fragment + metadata.jwks_uri = Some(Url::parse("https://localhost/token?query#fragment").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_registration_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Not https + let endpoint = Url::parse("http://localhost/registration").unwrap(); + metadata.registration_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "registration_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Missing + metadata.registration_endpoint = None; + metadata.clone().validate(&issuer).unwrap(); + + // Ok - Query & fragment + metadata.registration_endpoint = + Some(Url::parse("https://localhost/registration?query#fragment").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_scopes_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - No `openid` + metadata.scopes_supported = Some(vec!["custom".to_owned()]); 
+ assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::ScopesMissingOpenid) + ); + + // Ok - Missing + metadata.scopes_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Ok - With `openid` + metadata.scopes_supported = Some(vec!["openid".to_owned(), "custom".to_owned()]); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_response_types_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.response_types_supported = None; + assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingResponseTypesSupported) + ); + + // Ok - Present + metadata.response_types_supported = + Some(vec![OAuthAuthorizationEndpointResponseType::Code.into()]); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_token_endpoint_signing_alg_values_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Ok - Missing + metadata.token_endpoint_auth_signing_alg_values_supported = None; + metadata.token_endpoint_auth_methods_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - With `none` + metadata.token_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::None]); + let endpoint = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::SigningAlgValuesWithNone(endpoint)) => endpoint + ); + assert_eq!(endpoint, "token_endpoint"); + + // Ok - Other signing alg values. + metadata.token_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::Rs256, JsonWebSignatureAlg::EdDsa]); + metadata.clone().validate(&issuer).unwrap(); + + // Ok - `client_secret_jwt` with signing alg values. 
+ metadata.token_endpoint_auth_methods_supported = + Some(vec![OAuthClientAuthenticationMethod::ClientSecretJwt]); + metadata.token_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::Rs256]); + metadata.clone().validate(&issuer).unwrap(); + + // Ok - `private_key_jwt` with signing alg values. + metadata.token_endpoint_auth_methods_supported = + Some(vec![OAuthClientAuthenticationMethod::PrivateKeyJwt]); + metadata.token_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::Rs256]); + metadata.clone().validate(&issuer).unwrap(); + + // Ok - `client_secret_jwt` without signing alg values. + metadata.token_endpoint_auth_methods_supported = + Some(vec![OAuthClientAuthenticationMethod::ClientSecretJwt]); + metadata.token_endpoint_auth_signing_alg_values_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Ok - `private_key_jwt` without signing alg values. + metadata.token_endpoint_auth_methods_supported = + Some(vec![OAuthClientAuthenticationMethod::PrivateKeyJwt]); + metadata.token_endpoint_auth_signing_alg_values_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Ok - Other auth methods without signing alg values. 
+ metadata.token_endpoint_auth_methods_supported = Some(vec![ + OAuthClientAuthenticationMethod::ClientSecretBasic, + OAuthClientAuthenticationMethod::ClientSecretPost, + ]); + metadata.token_endpoint_auth_signing_alg_values_supported = None; + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_revocation_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Ok - Missing + metadata.revocation_endpoint = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - Not https + let endpoint = Url::parse("http://localhost/revocation").unwrap(); + metadata.revocation_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "revocation_endpoint"); + assert_eq!(url, endpoint); + + // Err - Fragment + let endpoint = Url::parse("https://localhost/revocation#fragment").unwrap(); + metadata.revocation_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlWithFragment(field, url)) => (field, url) + ); + assert_eq!(field, "revocation_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query + metadata.revocation_endpoint = + Some(Url::parse("https://localhost/revocation?query").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_revocation_endpoint_signing_alg_values_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Only check that this field is validated, algorithm checks are already + // tested for the token endpoint. 
+ + // Ok - Missing + metadata.revocation_endpoint_auth_signing_alg_values_supported = None; + metadata.revocation_endpoint_auth_methods_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - With `none` + metadata.revocation_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::None]); + let endpoint = assert_matches!( + metadata.validate(&issuer), + Err(ProviderMetadataVerificationError::SigningAlgValuesWithNone(endpoint)) => endpoint + ); + assert_eq!(endpoint, "revocation_endpoint"); + } + + #[test] + fn validate_introspection_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Ok - Missing + metadata.introspection_endpoint = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - Not https + let endpoint = Url::parse("http://localhost/introspection").unwrap(); + metadata.introspection_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "introspection_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query & Fragment + metadata.introspection_endpoint = + Some(Url::parse("https://localhost/introspection?query#fragment").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_introspection_endpoint_signing_alg_values_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Only check that this field is validated, algorithm checks are already + // tested for the token endpoint. 
+ + // Ok - Missing + metadata.introspection_endpoint_auth_signing_alg_values_supported = None; + metadata.introspection_endpoint_auth_methods_supported = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - With `none` + metadata.introspection_endpoint_auth_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::None]); + let endpoint = assert_matches!( + metadata.validate(&issuer), + Err(ProviderMetadataVerificationError::SigningAlgValuesWithNone(endpoint)) => endpoint + ); + assert_eq!(endpoint, "introspection_endpoint"); + } + + #[test] + fn validate_userinfo_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Ok - Missing + metadata.userinfo_endpoint = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - Not https + let endpoint = Url::parse("http://localhost/userinfo").unwrap(); + metadata.userinfo_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "userinfo_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query & Fragment + metadata.userinfo_endpoint = + Some(Url::parse("https://localhost/userinfo?query#fragment").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_subject_types_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.subject_types_supported = None; + assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingSubjectTypesSupported) + ); + + // Ok - Present + metadata.subject_types_supported = Some(vec![SubjectType::Public, SubjectType::Pairwise]); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_id_token_signing_alg_values_supported() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Err - Missing + metadata.id_token_signing_alg_values_supported = None; + 
assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::MissingIdTokenSigningAlgValuesSupported) + ); + + // Ok - Present + metadata.id_token_signing_alg_values_supported = + Some(vec![JsonWebSignatureAlg::Rs256, JsonWebSignatureAlg::EdDsa]); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn validate_pushed_authorization_request_endpoint() { + let (mut metadata, issuer) = valid_provider_metadata(); + + // Ok - Missing + metadata.pushed_authorization_request_endpoint = None; + metadata.clone().validate(&issuer).unwrap(); + + // Err - Not https + let endpoint = Url::parse("http://localhost/par").unwrap(); + metadata.pushed_authorization_request_endpoint = Some(endpoint.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(&issuer), + Err(ProviderMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "pushed_authorization_request_endpoint"); + assert_eq!(url, endpoint); + + // Ok - Query & Fragment + metadata.pushed_authorization_request_endpoint = + Some(Url::parse("https://localhost/par?query#fragment").unwrap()); + metadata.validate(&issuer).unwrap(); + } + + #[test] + fn serialize_application_type() { + assert_eq!( + serde_json::to_string(&ApplicationType::Web).unwrap(), + "\"web\"" + ); + assert_eq!( + serde_json::to_string(&ApplicationType::Native).unwrap(), + "\"native\"" + ); + } + + #[test] + fn deserialize_application_type() { + assert_eq!( + serde_json::from_str::("\"web\"").unwrap(), + ApplicationType::Web + ); + assert_eq!( + serde_json::from_str::("\"native\"").unwrap(), + ApplicationType::Native + ); + } + + #[test] + fn serialize_subject_type() { + assert_eq!( + serde_json::to_string(&SubjectType::Public).unwrap(), + "\"public\"" + ); + assert_eq!( + serde_json::to_string(&SubjectType::Pairwise).unwrap(), + "\"pairwise\"" + ); + } + + #[test] + fn deserialize_subject_type() { + assert_eq!( + serde_json::from_str::("\"public\"").unwrap(), 
+ SubjectType::Public + ); + assert_eq!( + serde_json::from_str::("\"pairwise\"").unwrap(), + SubjectType::Pairwise + ); + } + + #[test] + fn serialize_claim_type() { + assert_eq!( + serde_json::to_string(&ClaimType::Normal).unwrap(), + "\"normal\"" + ); + assert_eq!( + serde_json::to_string(&ClaimType::Aggregated).unwrap(), + "\"aggregated\"" + ); + assert_eq!( + serde_json::to_string(&ClaimType::Distributed).unwrap(), + "\"distributed\"" + ); + } + + #[test] + fn deserialize_claim_type() { + assert_eq!( + serde_json::from_str::("\"normal\"").unwrap(), + ClaimType::Normal + ); + assert_eq!( + serde_json::from_str::("\"aggregated\"").unwrap(), + ClaimType::Aggregated + ); + assert_eq!( + serde_json::from_str::("\"distributed\"").unwrap(), + ClaimType::Distributed + ); + } + + #[test] + fn deserialize_auth_method_or_token_type_type() { + assert_eq!( + serde_json::from_str::("\"none\"").unwrap(), + AuthenticationMethodOrAccessTokenType::AuthenticationMethod( + OAuthClientAuthenticationMethod::None + ) + ); + assert_eq!( + serde_json::from_str::("\"Bearer\"").unwrap(), + AuthenticationMethodOrAccessTokenType::AccessTokenType(OAuthAccessTokenType::Bearer) + ); + assert_eq!( + serde_json::from_str::("\"unknown_value\"") + .unwrap(), + AuthenticationMethodOrAccessTokenType::Unknown("unknown_value".to_owned()) + ); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/pkce.rs b/matrix-authentication-service/crates/oauth2-types/src/pkce.rs new file mode 100644 index 00000000..d806ae7f --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/pkce.rs @@ -0,0 +1,168 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types for the [Proof Key for Code Exchange]. +//! +//! 
[Proof Key for Code Exchange]: https://www.rfc-editor.org/rfc/rfc7636 + +use std::borrow::Cow; + +use base64ct::{Base64UrlUnpadded, Encoding}; +use mas_iana::oauth::PkceCodeChallengeMethod; +use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; +use thiserror::Error; + +/// Errors that can occur when verifying a code challenge. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum CodeChallengeError { + /// The code verifier should be at least 43 characters long. + #[error("code_verifier should be at least 43 characters long")] + TooShort, + + /// The code verifier should be at most 128 characters long. + #[error("code_verifier should be at most 128 characters long")] + TooLong, + + /// The code verifier contains invalid characters. + #[error("code_verifier contains invalid characters")] + InvalidCharacters, + + /// The challenge verification failed. + #[error("challenge verification failed")] + VerificationFailed, + + /// The challenge method is unsupported. + #[error("unknown challenge method")] + UnknownChallengeMethod, +} + +fn validate_verifier(verifier: &str) -> Result<(), CodeChallengeError> { + if verifier.len() < 43 { + return Err(CodeChallengeError::TooShort); + } + + if verifier.len() > 128 { + return Err(CodeChallengeError::TooLong); + } + + if !verifier + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.' || c == '_' || c == '~') + { + return Err(CodeChallengeError::InvalidCharacters); + } + + Ok(()) +} + +/// Helper trait to compute and verify code challenges. 
+pub trait CodeChallengeMethodExt { + /// Compute the challenge for a given verifier + /// + /// # Errors + /// + /// Returns an error if the verifier did not adhere to the rules defined by + /// the RFC in terms of length and allowed characters + fn compute_challenge<'a>(&self, verifier: &'a str) -> Result, CodeChallengeError>; + + /// Verify that a given verifier is valid for the given challenge + /// + /// # Errors + /// + /// Returns an error if the verifier did not match the challenge, or if the + /// verifier did not adhere to the rules defined by the RFC in terms of + /// length and allowed characters + fn verify(&self, challenge: &str, verifier: &str) -> Result<(), CodeChallengeError> + where + Self: Sized, + { + if self.compute_challenge(verifier)? == challenge { + Ok(()) + } else { + Err(CodeChallengeError::VerificationFailed) + } + } +} + +impl CodeChallengeMethodExt for PkceCodeChallengeMethod { + fn compute_challenge<'a>(&self, verifier: &'a str) -> Result, CodeChallengeError> { + validate_verifier(verifier)?; + + let challenge = match self { + Self::Plain => verifier.into(), + Self::S256 => { + let mut hasher = Sha256::new(); + hasher.update(verifier.as_bytes()); + let hash = hasher.finalize(); + let verifier = Base64UrlUnpadded::encode_string(&hash); + verifier.into() + } + _ => return Err(CodeChallengeError::UnknownChallengeMethod), + }; + + Ok(challenge) + } +} + +/// The code challenge data added to an authorization request. +#[derive(Clone, Serialize, Deserialize)] +pub struct AuthorizationRequest { + /// The code challenge method. + pub code_challenge_method: PkceCodeChallengeMethod, + + /// The code challenge computed from the verifier and the method. + pub code_challenge: String, +} + +/// The code challenge data added to a token request. +#[derive(Clone, Serialize, Deserialize)] +pub struct TokenRequest { + /// The code challenge verifier. 
+ pub code_challenge_verifier: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_pkce_verification() { + use PkceCodeChallengeMethod::{Plain, S256}; + // This challenge comes from the RFC7636 appendices + let challenge = "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM"; + + assert!( + S256.verify(challenge, "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk") + .is_ok() + ); + + assert!(Plain.verify(challenge, challenge).is_ok()); + + assert_eq!( + S256.verify(challenge, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"), + Err(CodeChallengeError::VerificationFailed), + ); + + assert_eq!( + S256.verify(challenge, "tooshort"), + Err(CodeChallengeError::TooShort), + ); + + assert_eq!( + S256.verify(challenge, "toolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolong"), + Err(CodeChallengeError::TooLong), + ); + + assert_eq!( + S256.verify( + challenge, + "this is long enough but has invalid characters in it" + ), + Err(CodeChallengeError::InvalidCharacters), + ); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/registration/client_metadata_serde.rs b/matrix-authentication-service/crates/oauth2-types/src/registration/client_metadata_serde.rs new file mode 100644 index 00000000..8edfcc6c --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/registration/client_metadata_serde.rs @@ -0,0 +1,501 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::borrow::Cow; + +use chrono::Duration; +use indexmap::IndexMap; +use language_tags::LanguageTag; +use mas_iana::{ + jose::{JsonWebEncryptionAlg, JsonWebEncryptionEnc, JsonWebSignatureAlg}, + oauth::OAuthClientAuthenticationMethod, +}; +use mas_jose::jwk::PublicJsonWebKeySet; +use serde::{ + Deserialize, Serialize, + de::{DeserializeOwned, Error}, + ser::SerializeMap, +}; +use serde_json::Value; +use serde_with::{DurationSeconds, serde_as, skip_serializing_none}; +use url::Url; + +use super::{ClientMetadata, Localized, VerifiedClientMetadata}; +use crate::{ + oidc::{ApplicationType, SubjectType}, + requests::GrantType, + response_type::ResponseType, +}; + +impl Localized { + fn serialize(&self, map: &mut M, field_name: &str) -> Result<(), M::Error> + where + M: SerializeMap, + T: Serialize, + { + map.serialize_entry(field_name, &self.non_localized)?; + + for (lang, localized) in &self.localized { + map.serialize_entry(&format!("{field_name}#{lang}"), localized)?; + } + + Ok(()) + } + + fn deserialize( + map: &mut IndexMap, Value>>, + field_name: &'static str, + ) -> Result, serde_json::Error> + where + T: DeserializeOwned, + { + let Some(map) = map.shift_remove(field_name) else { + return Ok(None); + }; + + let mut non_localized = None; + let mut localized = IndexMap::with_capacity(map.len() - 1); + + for (k, v) in map { + let value = serde_json::from_value(v)?; + + if let Some(lang) = k { + localized.insert(lang, value); + } else { + non_localized = Some(value); + } + } + + let non_localized = non_localized.ok_or_else(|| { + serde_json::Error::custom(format!( + "missing non-localized variant of field '{field_name}'" + )) + })?; + + Ok(Some(Localized { + non_localized, + localized, + })) + } + + /// Sort the localized keys. This is inteded to ensure a stable + /// serialization order when needed. 
+ pub(super) fn sort(&mut self) { + self.localized + .sort_unstable_by(|k1, _v1, k2, _v2| k1.as_str().cmp(k2.as_str())); + } +} + +#[serde_as] +#[skip_serializing_none] +#[derive(Serialize, Deserialize)] +pub struct ClientMetadataSerdeHelper { + redirect_uris: Option>, + response_types: Option>, + grant_types: Option>, + application_type: Option, + contacts: Option>, + jwks_uri: Option, + jwks: Option, + software_id: Option, + software_version: Option, + sector_identifier_uri: Option, + subject_type: Option, + token_endpoint_auth_method: Option, + token_endpoint_auth_signing_alg: Option, + id_token_signed_response_alg: Option, + id_token_encrypted_response_alg: Option, + id_token_encrypted_response_enc: Option, + userinfo_signed_response_alg: Option, + userinfo_encrypted_response_alg: Option, + userinfo_encrypted_response_enc: Option, + request_object_signing_alg: Option, + request_object_encryption_alg: Option, + request_object_encryption_enc: Option, + #[serde_as(as = "Option>")] + default_max_age: Option, + require_auth_time: Option, + default_acr_values: Option>, + initiate_login_uri: Option, + request_uris: Option>, + require_signed_request_object: Option, + require_pushed_authorization_requests: Option, + introspection_signed_response_alg: Option, + introspection_encrypted_response_alg: Option, + introspection_encrypted_response_enc: Option, + post_logout_redirect_uris: Option>, + #[serde(flatten)] + extra: ClientMetadataLocalizedFields, +} + +impl From for ClientMetadataSerdeHelper { + fn from(metadata: VerifiedClientMetadata) -> Self { + metadata.inner.into() + } +} + +impl From for ClientMetadataSerdeHelper { + fn from(metadata: ClientMetadata) -> Self { + let ClientMetadata { + redirect_uris, + response_types, + grant_types, + application_type, + contacts, + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + jwks_uri, + jwks, + software_id, + software_version, + sector_identifier_uri, + subject_type, + token_endpoint_auth_method, + 
token_endpoint_auth_signing_alg, + id_token_signed_response_alg, + id_token_encrypted_response_alg, + id_token_encrypted_response_enc, + userinfo_signed_response_alg, + userinfo_encrypted_response_alg, + userinfo_encrypted_response_enc, + request_object_signing_alg, + request_object_encryption_alg, + request_object_encryption_enc, + default_max_age, + require_auth_time, + default_acr_values, + initiate_login_uri, + request_uris, + require_signed_request_object, + require_pushed_authorization_requests, + introspection_signed_response_alg, + introspection_encrypted_response_alg, + introspection_encrypted_response_enc, + post_logout_redirect_uris, + } = metadata; + + ClientMetadataSerdeHelper { + redirect_uris, + response_types, + grant_types, + application_type, + contacts, + jwks_uri, + jwks, + software_id, + software_version, + sector_identifier_uri, + subject_type, + token_endpoint_auth_method, + token_endpoint_auth_signing_alg, + id_token_signed_response_alg, + id_token_encrypted_response_alg, + id_token_encrypted_response_enc, + userinfo_signed_response_alg, + userinfo_encrypted_response_alg, + userinfo_encrypted_response_enc, + request_object_signing_alg, + request_object_encryption_alg, + request_object_encryption_enc, + default_max_age, + require_auth_time, + default_acr_values, + initiate_login_uri, + request_uris, + require_signed_request_object, + require_pushed_authorization_requests, + introspection_signed_response_alg, + introspection_encrypted_response_alg, + introspection_encrypted_response_enc, + post_logout_redirect_uris, + extra: ClientMetadataLocalizedFields { + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + }, + } + } +} + +impl From for ClientMetadata { + fn from(metadata: ClientMetadataSerdeHelper) -> Self { + let ClientMetadataSerdeHelper { + redirect_uris, + response_types, + grant_types, + application_type, + contacts, + jwks_uri, + jwks, + software_id, + software_version, + sector_identifier_uri, + subject_type, + 
token_endpoint_auth_method, + token_endpoint_auth_signing_alg, + id_token_signed_response_alg, + id_token_encrypted_response_alg, + id_token_encrypted_response_enc, + userinfo_signed_response_alg, + userinfo_encrypted_response_alg, + userinfo_encrypted_response_enc, + request_object_signing_alg, + request_object_encryption_alg, + request_object_encryption_enc, + default_max_age, + require_auth_time, + default_acr_values, + initiate_login_uri, + request_uris, + require_signed_request_object, + require_pushed_authorization_requests, + introspection_signed_response_alg, + introspection_encrypted_response_alg, + introspection_encrypted_response_enc, + post_logout_redirect_uris, + extra: + ClientMetadataLocalizedFields { + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + }, + } = metadata; + + ClientMetadata { + redirect_uris, + response_types, + grant_types, + application_type, + contacts, + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + jwks_uri, + jwks, + software_id, + software_version, + sector_identifier_uri, + subject_type, + token_endpoint_auth_method, + token_endpoint_auth_signing_alg, + id_token_signed_response_alg, + id_token_encrypted_response_alg, + id_token_encrypted_response_enc, + userinfo_signed_response_alg, + userinfo_encrypted_response_alg, + userinfo_encrypted_response_enc, + request_object_signing_alg, + request_object_encryption_alg, + request_object_encryption_enc, + default_max_age, + require_auth_time, + default_acr_values, + initiate_login_uri, + request_uris, + require_signed_request_object, + require_pushed_authorization_requests, + introspection_signed_response_alg, + introspection_encrypted_response_alg, + introspection_encrypted_response_enc, + post_logout_redirect_uris, + } + } +} + +struct ClientMetadataLocalizedFields { + client_name: Option>, + logo_uri: Option>, + client_uri: Option>, + policy_uri: Option>, + tos_uri: Option>, +} + +impl Serialize for ClientMetadataLocalizedFields { + fn 
serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut map = serializer.serialize_map(None)?; + + if let Some(client_name) = &self.client_name { + client_name.serialize(&mut map, "client_name")?; + } + + if let Some(logo_uri) = &self.logo_uri { + logo_uri.serialize(&mut map, "logo_uri")?; + } + + if let Some(client_uri) = &self.client_uri { + client_uri.serialize(&mut map, "client_uri")?; + } + + if let Some(policy_uri) = &self.policy_uri { + policy_uri.serialize(&mut map, "policy_uri")?; + } + + if let Some(tos_uri) = &self.tos_uri { + tos_uri.serialize(&mut map, "tos_uri")?; + } + + map.end() + } +} + +impl<'de> Deserialize<'de> for ClientMetadataLocalizedFields { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let map = IndexMap::, Value>::deserialize(deserializer)?; + let mut new_map: IndexMap, Value>> = IndexMap::new(); + + for (k, v) in map { + let (prefix, lang) = if let Some((prefix, lang)) = k.split_once('#') { + let lang = LanguageTag::parse(lang).map_err(|_| { + D::Error::invalid_value(serde::de::Unexpected::Str(lang), &"language tag") + })?; + (prefix.to_owned(), Some(lang)) + } else { + (k.into_owned(), None) + }; + + new_map.entry(prefix).or_default().insert(lang, v); + } + + let client_name = + Localized::deserialize(&mut new_map, "client_name").map_err(D::Error::custom)?; + + let logo_uri = + Localized::deserialize(&mut new_map, "logo_uri").map_err(D::Error::custom)?; + + let client_uri = + Localized::deserialize(&mut new_map, "client_uri").map_err(D::Error::custom)?; + + let policy_uri = + Localized::deserialize(&mut new_map, "policy_uri").map_err(D::Error::custom)?; + + let tos_uri = Localized::deserialize(&mut new_map, "tos_uri").map_err(D::Error::custom)?; + + Ok(Self { + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + }) + } +} + +#[cfg(test)] +mod tests { + use insta::assert_yaml_snapshot; + + use super::*; + + #[test] + fn 
deserialize_localized_fields() { + let metadata = serde_json::json!({ + "redirect_uris": ["http://localhost/oidc"], + "client_name": "Postbox", + "client_name#fr": "Boîte à lettres", + "client_uri": "https://localhost/", + "client_uri#fr": "https://localhost/fr", + "client_uri#de": "https://localhost/de", + }); + + let metadata: ClientMetadata = serde_json::from_value(metadata).unwrap(); + + let name = metadata.client_name.unwrap(); + assert_eq!(name.non_localized(), "Postbox"); + assert_eq!( + name.get(Some(&LanguageTag::parse("fr").unwrap())).unwrap(), + "Boîte à lettres" + ); + assert_eq!(name.get(Some(&LanguageTag::parse("de").unwrap())), None); + + let client_uri = metadata.client_uri.unwrap(); + assert_eq!(client_uri.non_localized().as_ref(), "https://localhost/"); + assert_eq!( + client_uri + .get(Some(&LanguageTag::parse("fr").unwrap())) + .unwrap() + .as_ref(), + "https://localhost/fr" + ); + assert_eq!( + client_uri + .get(Some(&LanguageTag::parse("de").unwrap())) + .unwrap() + .as_ref(), + "https://localhost/de" + ); + } + + #[test] + fn serialize_localized_fields() { + let client_name = Localized::new( + "Postbox".to_owned(), + [( + LanguageTag::parse("fr").unwrap(), + "Boîte à lettres".to_owned(), + )], + ); + let client_uri = Localized::new( + Url::parse("https://localhost").unwrap(), + [ + ( + LanguageTag::parse("fr").unwrap(), + Url::parse("https://localhost/fr").unwrap(), + ), + ( + LanguageTag::parse("de").unwrap(), + Url::parse("https://localhost/de").unwrap(), + ), + ], + ); + let metadata = ClientMetadata { + redirect_uris: Some(vec![Url::parse("http://localhost/oidc").unwrap()]), + client_name: Some(client_name), + client_uri: Some(client_uri), + ..Default::default() + } + .validate() + .unwrap(); + + assert_yaml_snapshot!(metadata, @r###" + redirect_uris: + - "http://localhost/oidc" + client_name: Postbox + "client_name#fr": Boîte à lettres + client_uri: "https://localhost/" + "client_uri#fr": "https://localhost/fr" + "client_uri#de": 
"https://localhost/de" + "###); + + // Do a roundtrip, we should get the same metadata back with the same order + let metadata: ClientMetadata = + serde_json::from_value(serde_json::to_value(metadata).unwrap()).unwrap(); + let metadata = metadata.validate().unwrap(); + assert_yaml_snapshot!(metadata, @r###" + redirect_uris: + - "http://localhost/oidc" + client_name: Postbox + "client_name#fr": Boîte à lettres + client_uri: "https://localhost/" + "client_uri#fr": "https://localhost/fr" + "client_uri#de": "https://localhost/de" + "###); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/registration/mod.rs b/matrix-authentication-service/crates/oauth2-types/src/registration/mod.rs new file mode 100644 index 00000000..e6b6aa86 --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/registration/mod.rs @@ -0,0 +1,1423 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types for [Dynamic Client Registration]. +//! +//! 
[Dynamic Client Registration]: https://openid.net/specs/openid-connect-registration-1_0.html + +use std::ops::Deref; + +use chrono::{DateTime, Duration, Utc}; +use indexmap::IndexMap; +use language_tags::LanguageTag; +use mas_iana::{ + jose::{JsonWebEncryptionAlg, JsonWebEncryptionEnc, JsonWebSignatureAlg}, + oauth::{OAuthAuthorizationEndpointResponseType, OAuthClientAuthenticationMethod}, +}; +use mas_jose::jwk::PublicJsonWebKeySet; +use serde::{Deserialize, Serialize}; +use serde_with::{TimestampSeconds, serde_as, skip_serializing_none}; +use thiserror::Error; +use url::Url; + +use crate::{ + oidc::{ApplicationType, SubjectType}, + requests::GrantType, + response_type::ResponseType, +}; + +mod client_metadata_serde; +use client_metadata_serde::ClientMetadataSerdeHelper; + +/// The default value of `response_types` if it is not set. +pub const DEFAULT_RESPONSE_TYPES: [OAuthAuthorizationEndpointResponseType; 1] = + [OAuthAuthorizationEndpointResponseType::Code]; + +/// The default value of `grant_types` if it is not set. +pub const DEFAULT_GRANT_TYPES: &[GrantType] = &[GrantType::AuthorizationCode]; + +/// The default value of `application_type` if it is not set. +pub const DEFAULT_APPLICATION_TYPE: ApplicationType = ApplicationType::Web; + +/// The default value of `token_endpoint_auth_method` if it is not set. +pub const DEFAULT_TOKEN_AUTH_METHOD: &OAuthClientAuthenticationMethod = + &OAuthClientAuthenticationMethod::ClientSecretBasic; + +/// The default value of `id_token_signed_response_alg` if it is not set. +pub const DEFAULT_SIGNING_ALGORITHM: &JsonWebSignatureAlg = &JsonWebSignatureAlg::Rs256; + +/// The default value of `id_token_encrypted_response_enc` if it is not set. +pub const DEFAULT_ENCRYPTION_ENC_ALGORITHM: &JsonWebEncryptionEnc = + &JsonWebEncryptionEnc::A128CbcHs256; + +/// A collection of localized variants. +/// +/// Always includes one non-localized variant. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Localized { + non_localized: T, + localized: IndexMap, +} + +impl Localized { + /// Constructs a new `Localized` with the given non-localized and localized + /// variants. + pub fn new(non_localized: T, localized: impl IntoIterator) -> Self { + Self { + non_localized, + localized: localized.into_iter().collect(), + } + } + + /// Returns the number of variants. + #[allow(clippy::len_without_is_empty)] + pub fn len(&self) -> usize { + self.localized.len() + 1 + } + + /// Get the non-localized variant. + pub fn non_localized(&self) -> &T { + &self.non_localized + } + + /// Get the non-localized variant. + pub fn to_non_localized(self) -> T { + self.non_localized + } + + /// Get the variant corresponding to the given language, if it exists. + pub fn get(&self, language: Option<&LanguageTag>) -> Option<&T> { + match language { + Some(lang) => self.localized.get(lang), + None => Some(&self.non_localized), + } + } + + /// Get an iterator over the variants. + pub fn iter(&self) -> impl Iterator, &T)> { + Some(&self.non_localized) + .into_iter() + .map(|val| (None, val)) + .chain(self.localized.iter().map(|(lang, val)| (Some(lang), val))) + } +} + +impl From<(T, IndexMap)> for Localized { + fn from(t: (T, IndexMap)) -> Self { + Localized { + non_localized: t.0, + localized: t.1, + } + } +} + +/// Client metadata, as described by the [IANA registry]. +/// +/// All the fields with a default value are accessible via methods. +/// +/// [IANA registry]: https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#client-metadata +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Default)] +#[serde(from = "ClientMetadataSerdeHelper", into = "ClientMetadataSerdeHelper")] +pub struct ClientMetadata { + /// Array of redirection URIs for use in redirect-based flows such as the + /// [authorization code flow]. 
+ /// + /// All the URIs used by the client in an authorization request's + /// `redirect_uri` field must appear in this list. + /// + /// This field is required and the URIs must not contain a fragment. + /// + /// [authorization code flow]: https://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth + pub redirect_uris: Option>, + + /// Array of the [OAuth 2.0 `response_type` values] that the client can use + /// at the [authorization endpoint]. + /// + /// All the types used by the client in an authorization request's + /// `response_type` field must appear in this list. + /// + /// Defaults to [`DEFAULT_RESPONSE_TYPES`]. + /// + /// [OAuth 2.0 `response_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + /// [authorization endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1 + pub response_types: Option>, + + /// Array of [OAuth 2.0 `grant_type` values] that the client can use at the + /// [token endpoint]. + /// + /// The possible grant types depend on the response types. Declaring support + /// for a grant type that is not compatible with the supported response + /// types will trigger an error during validation. + /// + /// All the types used by the client in a token request's `grant_type` field + /// must appear in this list. + /// + /// Defaults to [`DEFAULT_GRANT_TYPES`]. + /// + /// [OAuth 2.0 `grant_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + /// [token endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.2 + pub grant_types: Option>, + + /// The kind of the application. + /// + /// Defaults to [`DEFAULT_APPLICATION_TYPE`]. + pub application_type: Option, + + /// Array of e-mail addresses of people responsible for this client. + pub contacts: Option>, + + /// Name of the client to be presented to the end-user during authorization. + pub client_name: Option>, + + /// URL that references a logo for the client application. + pub logo_uri: Option>, + + /// URL of the home page of the client. 
+ pub client_uri: Option>, + + /// URL that the client provides to the end-user to read about the how the + /// profile data will be used. + pub policy_uri: Option>, + + /// URL that the client provides to the end-user to read about the client's + /// terms of service. + pub tos_uri: Option>, + + /// URL for the client's [JWK] Set document. + /// + /// If the client signs requests to the server, it contains the signing + /// key(s) the server uses to validate signatures from the client. The JWK + /// Set may also contain the client's encryption keys(s), which are used by + /// the server to encrypt responses to the client. + /// + /// This field is mutually exclusive with `jwks`. + /// + /// [JWK]: https://www.rfc-editor.org/rfc/rfc7517.html + pub jwks_uri: Option, + + /// Client's [JWK] Set document, passed by value. + /// + /// The semantics of this field are the same as `jwks_uri`, other than that + /// the JWK Set is passed by value, rather than by reference. + /// + /// This field is mutually exclusive with `jwks_uri`. + /// + /// [JWK]: https://www.rfc-editor.org/rfc/rfc7517.html + pub jwks: Option, + + /// A unique identifier string assigned by the client developer or software + /// publisher used by registration endpoints to identify the client software + /// to be dynamically registered. + /// + /// It should remain the same for all instances and versions of the client + /// software. + pub software_id: Option, + + /// A version identifier string for the client software identified by + /// `software_id`. + pub software_version: Option, + + /// URL to be used in calculating pseudonymous identifiers by the OpenID + /// Connect provider when [pairwise subject identifiers] are used. + /// + /// If present, this must use the `https` scheme. + /// + /// [pairwise subject identifiers]: https://openid.net/specs/openid-connect-core-1_0.html#PairwiseAlg + pub sector_identifier_uri: Option, + + /// Subject type requested for responses to this client. 
+ /// + /// This field must match one of the supported types by the provider. + pub subject_type: Option, + + /// Requested client authentication method for the [token endpoint]. + /// + /// If this is set to [`OAuthClientAuthenticationMethod::PrivateKeyJwt`], + /// one of the `jwks_uri` or `jwks` fields is required. + /// + /// Defaults to [`DEFAULT_TOKEN_AUTH_METHOD`]. + /// + /// [token endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.2 + pub token_endpoint_auth_method: Option, + + /// [JWS] `alg` algorithm that must be used for signing the [JWT] used to + /// authenticate the client at the token endpoint. + /// + /// If this field is present, it must not be + /// [`JsonWebSignatureAlg::None`]. This field is required if + /// `token_endpoint_auth_method` is one of + /// [`OAuthClientAuthenticationMethod::PrivateKeyJwt`] or + /// [`OAuthClientAuthenticationMethod::ClientSecretJwt`]. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + /// [JWT]: http://tools.ietf.org/html/draft-ietf-oauth-json-web-token + pub token_endpoint_auth_signing_alg: Option, + + /// [JWS] `alg` algorithm required for signing the ID Token issued to this + /// client. + /// + /// If this field is present, it must not be + /// [`JsonWebSignatureAlg::None`], unless the client uses only response + /// types that return no ID Token from the authorization endpoint. + /// + /// Defaults to [`DEFAULT_SIGNING_ALGORITHM`]. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + pub id_token_signed_response_alg: Option, + + /// [JWE] `alg` algorithm required for encrypting the ID Token issued to + /// this client. + /// + /// This field is required if `id_token_encrypted_response_enc` is provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub id_token_encrypted_response_alg: Option, + + /// [JWE] `enc` algorithm required for encrypting the ID Token issued to + /// this client. 
+ /// + /// Defaults to [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] if + /// `id_token_encrypted_response_alg` is provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub id_token_encrypted_response_enc: Option, + + /// [JWS] `alg` algorithm required for signing user info responses. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + pub userinfo_signed_response_alg: Option, + + /// [JWE] `alg` algorithm required for encrypting user info responses. + /// + /// If `userinfo_signed_response_alg` is not provided, this field has no + /// effect. + /// + /// This field is required if `userinfo_encrypted_response_enc` is provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub userinfo_encrypted_response_alg: Option, + + /// [JWE] `enc` algorithm required for encrypting user info responses. + /// + /// If `userinfo_signed_response_alg` is not provided, this field has no + /// effect. + /// + /// Defaults to [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] if + /// `userinfo_encrypted_response_alg` is provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub userinfo_encrypted_response_enc: Option, + + /// [JWS] `alg` algorithm that must be used for signing Request Objects sent + /// to the provider. + /// + /// Defaults to any algorithm supported by the client and the provider. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + pub request_object_signing_alg: Option, + + /// [JWE] `alg` algorithm the client is declaring that it may use for + /// encrypting Request Objects sent to the provider. + /// + /// This field is required if `request_object_encryption_enc` is provided. 
+ /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub request_object_encryption_alg: Option, + + /// [JWE] `enc` algorithm the client is declaring that it may use for + /// encrypting Request Objects sent to the provider. + /// + /// Defaults to [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] if + /// `request_object_encryption_alg` is provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + pub request_object_encryption_enc: Option, + + /// Default maximum authentication age. + /// + /// Specifies that the End-User must be actively authenticated if the + /// end-user was authenticated longer ago than the specified number of + /// seconds. + /// + /// The `max_age` request parameter overrides this default value. + pub default_max_age: Option, + + /// Whether the `auth_time` Claim in the ID Token is required. + /// + /// Defaults to `false`. + pub require_auth_time: Option, + + /// Default requested Authentication Context Class Reference values. + pub default_acr_values: Option>, + + /// URI that a third party can use to [initiate a login by the client]. + /// + /// If present, this must use the `https` scheme. + /// + /// [initiate a login by the client]: https://openid.net/specs/openid-connect-core-1_0.html#ThirdPartyInitiatedLogin + pub initiate_login_uri: Option, + + /// `request_uri` values that are pre-registered by the client for use at + /// the provider. + /// + /// Providers can require that `request_uri` values used be pre-registered + /// with the `require_request_uri_registration` discovery parameter. + /// + /// Servers MAY cache the contents of the files referenced by these URIs and + /// not retrieve them at the time they are used in a request. If the + /// contents of the request file could ever change, these URI values should + /// include the base64url encoded SHA-256 hash value of the file contents + /// referenced by the URI as the value of the URI fragment. 
If the fragment + /// value used for a URI changes, that signals the server that its cached + /// value for that URI with the old fragment value is no longer valid. + pub request_uris: Option>, + + /// Whether the client will only send authorization requests as [Request + /// Objects]. + /// + /// Defaults to `false`. + /// + /// [Request Object]: https://www.rfc-editor.org/rfc/rfc9101.html + pub require_signed_request_object: Option, + + /// Whether the client will only send authorization requests via the [pushed + /// authorization request endpoint]. + /// + /// Defaults to `false`. + /// + /// [pushed authorization request endpoint]: https://www.rfc-editor.org/rfc/rfc9126.html + pub require_pushed_authorization_requests: Option, + + /// [JWS] `alg` algorithm for signing responses of the [introspection + /// endpoint]. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + /// [introspection endpoint]: https://www.rfc-editor.org/info/rfc7662 + pub introspection_signed_response_alg: Option, + + /// [JWE] `alg` algorithm for encrypting responses of the [introspection + /// endpoint]. + /// + /// If `introspection_signed_response_alg` is not provided, this field has + /// no effect. + /// + /// This field is required if `introspection_encrypted_response_enc` is + /// provided. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + /// [introspection endpoint]: https://www.rfc-editor.org/info/rfc7662 + pub introspection_encrypted_response_alg: Option, + + /// [JWE] `enc` algorithm for encrypting responses of the [introspection + /// endpoint]. + /// + /// If `introspection_signed_response_alg` is not provided, this field has + /// no effect. + /// + /// Defaults to [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] if + /// `introspection_encrypted_response_alg` is provided. 
+ /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + /// [introspection endpoint]: https://www.rfc-editor.org/info/rfc7662 + pub introspection_encrypted_response_enc: Option, + + /// `post_logout_redirect_uri` values that are pre-registered by the client + /// for use at the provider's [RP-Initiated Logout endpoint]. + /// + /// [RP-Initiated Logout endpoint]: https://openid.net/specs/openid-connect-rpinitiated-1_0.html + pub post_logout_redirect_uris: Option>, +} + +impl ClientMetadata { + /// Validate this `ClientMetadata` according to the [OpenID Connect Dynamic + /// Client Registration Spec 1.0]. + /// + /// # Errors + /// + /// Will return `Err` if validation fails. + /// + /// [OpenID Connect Dynamic Client Registration Spec 1.0]: https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata + pub fn validate(self) -> Result { + let grant_types = self.grant_types(); + let has_implicit = grant_types.contains(&GrantType::Implicit); + let has_authorization_code = grant_types.contains(&GrantType::AuthorizationCode); + let has_both = has_implicit && has_authorization_code; + + if let Some(uris) = &self.redirect_uris { + if let Some(uri) = uris.iter().find(|uri| uri.fragment().is_some()) { + return Err(ClientMetadataVerificationError::RedirectUriWithFragment( + uri.clone(), + )); + } + } else if has_authorization_code || has_implicit { + // Required for authorization code and implicit flows + return Err(ClientMetadataVerificationError::MissingRedirectUris); + } + + let response_type_code = [OAuthAuthorizationEndpointResponseType::Code.into()]; + let response_types = match &self.response_types { + Some(types) => &types[..], + // Default to code only if the client uses the authorization code or implicit flow + None if has_authorization_code || has_implicit => &response_type_code[..], + None => &[], + }; + + for response_type in response_types { + let has_code = response_type.has_code(); + let has_id_token = 
response_type.has_id_token(); + let has_token = response_type.has_token(); + let is_ok = has_code && has_both + || !has_code && has_implicit + || has_authorization_code && !has_id_token && !has_token + || !has_code && !has_id_token && !has_token; + + if !is_ok { + return Err(ClientMetadataVerificationError::IncoherentResponseType( + response_type.clone(), + )); + } + } + + if self.jwks_uri.is_some() && self.jwks.is_some() { + return Err(ClientMetadataVerificationError::JwksUriAndJwksMutuallyExclusive); + } + + if let Some(url) = self + .sector_identifier_uri + .as_ref() + .filter(|url| url.scheme() != "https") + { + return Err(ClientMetadataVerificationError::UrlNonHttpsScheme( + "sector_identifier_uri", + url.clone(), + )); + } + + if *self.token_endpoint_auth_method() == OAuthClientAuthenticationMethod::PrivateKeyJwt + && self.jwks_uri.is_none() + && self.jwks.is_none() + { + return Err(ClientMetadataVerificationError::MissingJwksForTokenMethod); + } + + if let Some(alg) = &self.token_endpoint_auth_signing_alg { + if *alg == JsonWebSignatureAlg::None { + return Err(ClientMetadataVerificationError::UnauthorizedSigningAlgNone( + "token_endpoint", + )); + } + } else if matches!( + self.token_endpoint_auth_method(), + OAuthClientAuthenticationMethod::PrivateKeyJwt + | OAuthClientAuthenticationMethod::ClientSecretJwt + ) { + return Err(ClientMetadataVerificationError::MissingAuthSigningAlg( + "token_endpoint", + )); + } + + if *self.id_token_signed_response_alg() == JsonWebSignatureAlg::None + && response_types.iter().any(ResponseType::has_id_token) + { + return Err(ClientMetadataVerificationError::IdTokenSigningAlgNone); + } + + if self.id_token_encrypted_response_enc.is_some() { + self.id_token_encrypted_response_alg.as_ref().ok_or( + ClientMetadataVerificationError::MissingEncryptionAlg("id_token"), + )?; + } + + if self.userinfo_encrypted_response_enc.is_some() { + self.userinfo_encrypted_response_alg.as_ref().ok_or( + 
ClientMetadataVerificationError::MissingEncryptionAlg("userinfo"), + )?; + } + + if self.request_object_encryption_enc.is_some() { + self.request_object_encryption_alg.as_ref().ok_or( + ClientMetadataVerificationError::MissingEncryptionAlg("request_object"), + )?; + } + + if let Some(url) = self + .initiate_login_uri + .as_ref() + .filter(|url| url.scheme() != "https") + { + return Err(ClientMetadataVerificationError::UrlNonHttpsScheme( + "initiate_login_uri", + url.clone(), + )); + } + + if self.introspection_encrypted_response_enc.is_some() { + self.introspection_encrypted_response_alg.as_ref().ok_or( + ClientMetadataVerificationError::MissingEncryptionAlg("introspection"), + )?; + } + + Ok(VerifiedClientMetadata { inner: self }) + } + + /// Sort the properties. This is inteded to ensure a stable serialization + /// order when needed. + #[must_use] + pub fn sorted(mut self) -> Self { + // This sorts all the Vec and Localized fields + if let Some(redirect_uris) = &mut self.redirect_uris { + redirect_uris.sort(); + } + if let Some(response_types) = &mut self.response_types { + response_types.sort(); + } + if let Some(grant_types) = &mut self.grant_types { + grant_types.sort(); + } + if let Some(contacts) = &mut self.contacts { + contacts.sort(); + } + if let Some(client_name) = &mut self.client_name { + client_name.sort(); + } + if let Some(logo_uri) = &mut self.logo_uri { + logo_uri.sort(); + } + if let Some(client_uri) = &mut self.client_uri { + client_uri.sort(); + } + if let Some(policy_uri) = &mut self.policy_uri { + policy_uri.sort(); + } + if let Some(tos_uri) = &mut self.tos_uri { + tos_uri.sort(); + } + if let Some(default_acr_values) = &mut self.default_acr_values { + default_acr_values.sort(); + } + if let Some(request_uris) = &mut self.request_uris { + request_uris.sort(); + } + if let Some(post_logout_redirect_uris) = &mut self.post_logout_redirect_uris { + post_logout_redirect_uris.sort(); + } + + self + } + + /// Array of the [OAuth 2.0 
`response_type` values] that the client can use + /// at the [authorization endpoint]. + /// + /// All the types used by the client in an authorization request's + /// `response_type` field must appear in this list. + /// + /// Defaults to [`DEFAULT_RESPONSE_TYPES`]. + /// + /// [OAuth 2.0 `response_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + /// [authorization endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1 + #[must_use] + pub fn response_types(&self) -> Vec { + self.response_types.clone().unwrap_or_else(|| { + DEFAULT_RESPONSE_TYPES + .into_iter() + .map(ResponseType::from) + .collect() + }) + } + + /// Array of [OAuth 2.0 `grant_type` values] that the client can use at the + /// [token endpoint]. + /// + /// Note that the possible grant types depend on the response types. + /// + /// All the types used by the client in a token request's `grant_type` field + /// must appear in this list. + /// + /// Defaults to [`DEFAULT_GRANT_TYPES`]. + /// + /// [OAuth 2.0 `grant_type` values]: https://www.rfc-editor.org/rfc/rfc7591#page-9 + /// [token endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.2 + #[must_use] + pub fn grant_types(&self) -> &[GrantType] { + self.grant_types.as_deref().unwrap_or(DEFAULT_GRANT_TYPES) + } + + /// The kind of the application. + /// + /// Defaults to [`DEFAULT_APPLICATION_TYPE`]. + #[must_use] + pub fn application_type(&self) -> ApplicationType { + self.application_type + .clone() + .unwrap_or(DEFAULT_APPLICATION_TYPE) + } + + /// Requested client authentication method for the [token endpoint]. + /// + /// Defaults to [`DEFAULT_TOKEN_AUTH_METHOD`]. 
+ /// + /// [token endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.2 + #[must_use] + pub fn token_endpoint_auth_method(&self) -> &OAuthClientAuthenticationMethod { + self.token_endpoint_auth_method + .as_ref() + .unwrap_or(DEFAULT_TOKEN_AUTH_METHOD) + } + + /// [JWS] `alg` algorithm required for signing the ID Token issued to this + /// client. + /// + /// If this field is present, it must not be + /// [`JsonWebSignatureAlg::None`], unless the client uses only response + /// types that return no ID Token from the authorization endpoint. + /// + /// Defaults to [`DEFAULT_SIGNING_ALGORITHM`]. + /// + /// [JWS]: http://tools.ietf.org/html/draft-ietf-jose-json-web-signature + #[must_use] + pub fn id_token_signed_response_alg(&self) -> &JsonWebSignatureAlg { + self.id_token_signed_response_alg + .as_ref() + .unwrap_or(DEFAULT_SIGNING_ALGORITHM) + } + + /// [JWE] `alg` and `enc` algorithms required for encrypting the ID Token + /// issued to this client. + /// + /// Always returns `Some` if `id_token_encrypted_response_alg` is provided, + /// using the default of [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] for the `enc` + /// value if needed. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + #[must_use] + pub fn id_token_encrypted_response( + &self, + ) -> Option<(&JsonWebEncryptionAlg, &JsonWebEncryptionEnc)> { + self.id_token_encrypted_response_alg.as_ref().map(|alg| { + ( + alg, + self.id_token_encrypted_response_enc + .as_ref() + .unwrap_or(DEFAULT_ENCRYPTION_ENC_ALGORITHM), + ) + }) + } + + /// [JWE] `alg` and `enc` algorithms required for encrypting user info + /// responses. + /// + /// Always returns `Some` if `userinfo_encrypted_response_alg` is provided, + /// using the default of [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] for the `enc` + /// value if needed. 
+ /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + #[must_use] + pub fn userinfo_encrypted_response( + &self, + ) -> Option<(&JsonWebEncryptionAlg, &JsonWebEncryptionEnc)> { + self.userinfo_encrypted_response_alg.as_ref().map(|alg| { + ( + alg, + self.userinfo_encrypted_response_enc + .as_ref() + .unwrap_or(DEFAULT_ENCRYPTION_ENC_ALGORITHM), + ) + }) + } + + /// [JWE] `alg` and `enc` algorithms the client is declaring that it may use + /// for encrypting Request Objects sent to the provider. + /// + /// Always returns `Some` if `request_object_encryption_alg` is provided, + /// using the default of [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] for the `enc` + /// value if needed. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + #[must_use] + pub fn request_object_encryption( + &self, + ) -> Option<(&JsonWebEncryptionAlg, &JsonWebEncryptionEnc)> { + self.request_object_encryption_alg.as_ref().map(|alg| { + ( + alg, + self.request_object_encryption_enc + .as_ref() + .unwrap_or(DEFAULT_ENCRYPTION_ENC_ALGORITHM), + ) + }) + } + + /// Whether the `auth_time` Claim in the ID Token is required. + /// + /// Defaults to `false`. + #[must_use] + pub fn require_auth_time(&self) -> bool { + self.require_auth_time.unwrap_or_default() + } + + /// Whether the client will only send authorization requests as [Request + /// Objects]. + /// + /// Defaults to `false`. + /// + /// [Request Object]: https://www.rfc-editor.org/rfc/rfc9101.html + #[must_use] + pub fn require_signed_request_object(&self) -> bool { + self.require_signed_request_object.unwrap_or_default() + } + + /// Whether the client will only send authorization requests via the [pushed + /// authorization request endpoint]. + /// + /// Defaults to `false`. 
+ /// + /// [pushed authorization request endpoint]: https://www.rfc-editor.org/rfc/rfc9126.html + #[must_use] + pub fn require_pushed_authorization_requests(&self) -> bool { + self.require_pushed_authorization_requests + .unwrap_or_default() + } + + /// [JWE] `alg` and `enc` algorithms for encrypting responses of the + /// [introspection endpoint]. + /// + /// Always returns `Some` if `introspection_encrypted_response_alg` is + /// provided, using the default of [`DEFAULT_ENCRYPTION_ENC_ALGORITHM`] for + /// the `enc` value if needed. + /// + /// [JWE]: http://tools.ietf.org/html/draft-ietf-jose-json-web-encryption + /// [introspection endpoint]: https://www.rfc-editor.org/info/rfc7662 + #[must_use] + pub fn introspection_encrypted_response( + &self, + ) -> Option<(&JsonWebEncryptionAlg, &JsonWebEncryptionEnc)> { + self.introspection_encrypted_response_alg + .as_ref() + .map(|alg| { + ( + alg, + self.introspection_encrypted_response_enc + .as_ref() + .unwrap_or(DEFAULT_ENCRYPTION_ENC_ALGORITHM), + ) + }) + } +} + +/// The verified client metadata. +/// +/// All the fields required by the [OpenID Connect Dynamic Client Registration +/// Spec 1.0] or with a default value are accessible via methods. +/// +/// To access other fields, use this type's `Deref` implementation. +/// +/// [OpenID Connect Dynamic Client Registration Spec 1.0]: https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata +#[derive(Serialize, Debug, PartialEq, Eq, Clone)] +#[serde(into = "ClientMetadataSerdeHelper")] +pub struct VerifiedClientMetadata { + inner: ClientMetadata, +} + +impl VerifiedClientMetadata { + /// Array of redirection URIs for use in redirect-based flows such as the + /// [authorization code flow]. + /// + /// All the URIs used by the client in an authorization request's + /// `redirect_uri` field must appear in this list. 
+ /// + /// [authorization code flow]: https://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth + #[must_use] + pub fn redirect_uris(&self) -> &[Url] { + match &self.redirect_uris { + Some(v) => v, + None => &[], + } + } +} + +impl Deref for VerifiedClientMetadata { + type Target = ClientMetadata; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +/// All errors that can happen when verifying [`ClientMetadata`]. +#[derive(Debug, Error)] +pub enum ClientMetadataVerificationError { + /// The redirect URIs are missing. + #[error("redirect URIs are missing")] + MissingRedirectUris, + + /// The redirect URI has a fragment, which is not allowed. + #[error("redirect URI with fragment: {0}")] + RedirectUriWithFragment(Url), + + /// The given response type is not compatible with the grant types. + #[error("'{0}' response type not compatible with grant types")] + IncoherentResponseType(ResponseType), + + /// Both the `jwks_uri` and `jwks` fields are present but only one is + /// allowed. + #[error("jwks_uri and jwks are mutually exclusive")] + JwksUriAndJwksMutuallyExclusive, + + /// The URL of the given field doesn't use a `https` scheme. + #[error("{0}'s URL doesn't use a https scheme: {1}")] + UrlNonHttpsScheme(&'static str, Url), + + /// No JWK Set was provided but one is required for the token auth method. + #[error("missing JWK Set for token auth method")] + MissingJwksForTokenMethod, + + /// The given endpoint doesn't allow `none` as a signing algorithm. + #[error("none signing alg unauthorized for {0}")] + UnauthorizedSigningAlgNone(&'static str), + + /// The given endpoint is missing an auth signing algorithm, but it is + /// required because it uses one of the `client_secret_jwt` or + /// `private_key_jwt` authentication methods. + #[error("{0} missing auth signing algorithm")] + MissingAuthSigningAlg(&'static str), + + /// `none` is used as the signing algorithm for ID Tokens, but is not + /// allowed. 
+    #[error("ID Token signing alg is none")]
+    IdTokenSigningAlgNone,
+
+    /// The given encryption field has an `enc` value but not `alg` value.
+    #[error("{0} missing encryption alg value")]
+    MissingEncryptionAlg(&'static str),
+}
+
+/// The issuer response to dynamic client registration.
+#[serde_as]
+#[skip_serializing_none]
+#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
+pub struct ClientRegistrationResponse {
+    /// A unique client identifier.
+    pub client_id: String,
+
+    /// A client secret, if the `token_endpoint_auth_method` requires one.
+    #[serde(default)]
+    pub client_secret: Option<String>,
+
+    /// Time at which the Client Identifier was issued.
+    #[serde(default)]
+    #[serde_as(as = "Option<TimestampSeconds<i64>>")]
+    pub client_id_issued_at: Option<DateTime<Utc>>,
+
+    /// Time at which the `client_secret` will expire or 0 if it will not
+    /// expire.
+    ///
+    /// Required if `client_secret` is issued.
+    #[serde(default)]
+    #[serde_as(as = "Option<TimestampSeconds<i64>>")]
+    pub client_secret_expires_at: Option<DateTime<Utc>>,
+}
+
+#[cfg(test)]
+mod tests {
+    use assert_matches::assert_matches;
+    use mas_iana::{
+        jose::{JsonWebEncryptionAlg, JsonWebEncryptionEnc, JsonWebSignatureAlg},
+        oauth::{OAuthAuthorizationEndpointResponseType, OAuthClientAuthenticationMethod},
+    };
+    use mas_jose::jwk::PublicJsonWebKeySet;
+    use url::Url;
+
+    use super::{ClientMetadata, ClientMetadataVerificationError};
+    use crate::{requests::GrantType, response_type::ResponseType};
+
+    fn valid_client_metadata() -> ClientMetadata {
+        ClientMetadata {
+            redirect_uris: Some(vec![Url::parse("http://localhost/oidc").unwrap()]),
+            ..Default::default()
+        }
+    }
+
+    fn jwks() -> PublicJsonWebKeySet {
+        serde_json::from_value(serde_json::json!({
+            "keys": [
+                {
+                    "alg": "RS256",
+                    "kty": "RSA",
+                    "n":
"tCwhHOxX_ylh5kVwfVqW7QIBTIsPjkjCjVCppDrynuF_3msEdtEaG64eJUz84ODFNMCC0BQ57G7wrKQVWkdSDxWUEqGk2BixBiHJRWZdofz1WOBTdPVicvHW5Zl_aIt7uXWMdOp_SODw-O2y2f05EqbFWFnR2-1y9K8KbiOp82CD72ny1Jbb_3PxTs2Z0F4ECAtTzpDteaJtjeeueRjr7040JAjQ-5fpL5D1g8x14LJyVIo-FL_y94NPFbMp7UCi69CIfVHXFO8WYFz949og-47mWRrID5lS4zpx-QLuvNhUb_lSqmylUdQB3HpRdOcYdj3xwy4MHJuu7tTaf0AmCQ", + "use": "sig", + "kid": "d98f49bc6ca4581eae8dfadd494fce10ea23aab0", + "e": "AQAB" + } + ] + })).unwrap() + } + + #[test] + fn validate_required_metadata() { + let metadata = valid_client_metadata(); + metadata.validate().unwrap(); + } + + #[test] + fn validate_redirect_uris() { + let mut metadata = ClientMetadata::default(); + + // Err - Missing + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingRedirectUris) + ); + + // Err - Fragment + let wrong_uri = Url::parse("http://localhost/#fragment").unwrap(); + metadata.redirect_uris = Some(vec![ + Url::parse("http://localhost/").unwrap(), + wrong_uri.clone(), + ]); + let uri = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::RedirectUriWithFragment(uri)) => uri + ); + assert_eq!(uri, wrong_uri); + + // Ok - Path & Query + metadata.redirect_uris = Some(vec![ + Url::parse("http://localhost/").unwrap(), + Url::parse("http://localhost/oidc").unwrap(), + Url::parse("http://localhost/?oidc").unwrap(), + Url::parse("http://localhost/my-client?oidc").unwrap(), + ]); + metadata.validate().unwrap(); + } + + #[test] + fn validate_response_types() { + let mut metadata = valid_client_metadata(); + + // grant_type = authorization_code + // code - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::Code.into()]); + metadata.clone().validate().unwrap(); + + // code id_token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = 
assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code id_token token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code token - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::CodeToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // id_token - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::IdToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // id_token token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // none - Ok + metadata.response_types = 
Some(vec![OAuthAuthorizationEndpointResponseType::None.into()]); + metadata.clone().validate().unwrap(); + + // grant_type = implicit + metadata.grant_types = Some(vec![GrantType::Implicit]); + // code - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::Code.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code id_token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code id_token token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code token - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::CodeToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // id_token - Ok + metadata.response_types = + Some(vec![OAuthAuthorizationEndpointResponseType::IdToken.into()]); + metadata.clone().validate().unwrap(); + + // id_token token - Ok + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(), + ]); + metadata.clone().validate().unwrap(); + + // token - Ok + metadata.response_types = 
Some(vec![OAuthAuthorizationEndpointResponseType::Token.into()]); + metadata.clone().validate().unwrap(); + + // none - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::None.into()]); + metadata.clone().validate().unwrap(); + + // grant_types = [authorization_code, implicit] + metadata.grant_types = Some(vec![GrantType::AuthorizationCode, GrantType::Implicit]); + // code - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::Code.into()]); + metadata.clone().validate().unwrap(); + + // code id_token - Ok + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(), + ]); + metadata.clone().validate().unwrap(); + + // code id_token token - Ok + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken.into(), + ]); + metadata.clone().validate().unwrap(); + + // code token - Ok + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::CodeToken.into(), + ]); + metadata.clone().validate().unwrap(); + + // id_token - Ok + metadata.response_types = + Some(vec![OAuthAuthorizationEndpointResponseType::IdToken.into()]); + metadata.clone().validate().unwrap(); + + // id_token token - Ok + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(), + ]); + metadata.clone().validate().unwrap(); + + // token - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::Token.into()]); + metadata.clone().validate().unwrap(); + + // none - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::None.into()]); + metadata.clone().validate().unwrap(); + + // other grant_types + metadata.grant_types = Some(vec![GrantType::RefreshToken, GrantType::ClientCredentials]); + // code - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::Code.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res 
= assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code id_token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code id_token token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // code token - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::CodeToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // id_token - Err + let response_type: ResponseType = OAuthAuthorizationEndpointResponseType::IdToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // id_token token - Err + let response_type: ResponseType = + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // token - Err + let response_type: ResponseType = 
OAuthAuthorizationEndpointResponseType::Token.into(); + metadata.response_types = Some(vec![response_type.clone()]); + let res = assert_matches!(metadata.clone().validate(), Err(ClientMetadataVerificationError::IncoherentResponseType(res)) => res); + assert_eq!(res, response_type); + + // none - Ok + metadata.response_types = Some(vec![OAuthAuthorizationEndpointResponseType::None.into()]); + metadata.validate().unwrap(); + } + + #[test] + fn validate_jwks() { + let mut metadata = valid_client_metadata(); + + // Ok - jwks_uri is set + metadata.jwks_uri = Some(Url::parse("http://localhost/jwks").unwrap()); + metadata.clone().validate().unwrap(); + + // Err - Both are set + metadata.jwks = Some(jwks()); + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::JwksUriAndJwksMutuallyExclusive) + ); + + // Ok - jwks is set + metadata.jwks_uri = None; + metadata.validate().unwrap(); + } + + #[test] + fn validate_sector_identifier_uri() { + let mut metadata = valid_client_metadata(); + + // Err - Non-https URL + let identifier_uri = Url::parse("http://localhost/").unwrap(); + metadata.sector_identifier_uri = Some(identifier_uri.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "sector_identifier_uri"); + assert_eq!(url, identifier_uri); + + // Ok - https URL + metadata.sector_identifier_uri = Some(Url::parse("https://localhost/").unwrap()); + metadata.validate().unwrap(); + } + + #[test] + fn validate_token_endpoint_auth_method() { + let mut metadata = valid_client_metadata(); + + // Err - token_endpoint_auth_signing_alg is none + metadata.token_endpoint_auth_signing_alg = Some(JsonWebSignatureAlg::None); + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::UnauthorizedSigningAlgNone(field)) => field + ); + assert_eq!(field, "token_endpoint"); + + // 
private_key_jwt + metadata.token_endpoint_auth_method = Some(OAuthClientAuthenticationMethod::PrivateKeyJwt); + metadata.token_endpoint_auth_signing_alg = Some(JsonWebSignatureAlg::Rs256); + + // Err - No JWKS + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingJwksForTokenMethod) + ); + + // Ok - jwks_uri + metadata.jwks_uri = Some(Url::parse("https://localhost/jwks").unwrap()); + metadata.clone().validate().unwrap(); + + // Ok - jwks + metadata.jwks_uri = None; + metadata.jwks = Some(jwks()); + metadata.clone().validate().unwrap(); + + // Err - No token_endpoint_auth_signing_alg + metadata.token_endpoint_auth_signing_alg = None; + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingAuthSigningAlg(field)) => field + ); + assert_eq!(field, "token_endpoint"); + + // client_secret_jwt + metadata.token_endpoint_auth_method = + Some(OAuthClientAuthenticationMethod::ClientSecretJwt); + metadata.jwks = None; + + // Err - No token_endpoint_auth_signing_alg + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingAuthSigningAlg(field)) => field + ); + assert_eq!(field, "token_endpoint"); + + // Ok - Has token_endpoint_auth_signing_alg + metadata.token_endpoint_auth_signing_alg = Some(JsonWebSignatureAlg::Rs256); + metadata.validate().unwrap(); + } + + #[test] + fn validate_id_token_signed_response_alg() { + let mut metadata = valid_client_metadata(); + metadata.id_token_signed_response_alg = Some(JsonWebSignatureAlg::None); + metadata.grant_types = Some(vec![GrantType::AuthorizationCode, GrantType::Implicit]); + + // Err - code id_token + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::CodeIdToken.into(), + ]); + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::IdTokenSigningAlgNone) + ); + + // Err - code id_token token + metadata.response_types = Some(vec![ 
+ OAuthAuthorizationEndpointResponseType::CodeIdTokenToken.into(), + ]); + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::IdTokenSigningAlgNone) + ); + + // Err - id_token + metadata.response_types = + Some(vec![OAuthAuthorizationEndpointResponseType::IdToken.into()]); + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::IdTokenSigningAlgNone) + ); + + // Err - id_token token + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::IdTokenToken.into(), + ]); + assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::IdTokenSigningAlgNone) + ); + + // Ok - Other response types + metadata.response_types = Some(vec![ + OAuthAuthorizationEndpointResponseType::Code.into(), + OAuthAuthorizationEndpointResponseType::CodeToken.into(), + OAuthAuthorizationEndpointResponseType::Token.into(), + OAuthAuthorizationEndpointResponseType::None.into(), + ]); + metadata.validate().unwrap(); + } + + #[test] + fn validate_id_token_encrypted_response() { + let mut metadata = valid_client_metadata(); + metadata.id_token_encrypted_response_enc = Some(JsonWebEncryptionEnc::A128CbcHs256); + + // Err - No id_token_encrypted_response_alg + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingEncryptionAlg(field)) => field + ); + assert_eq!(field, "id_token"); + + // Ok - Has id_token_encrypted_response_alg + metadata.id_token_encrypted_response_alg = Some(JsonWebEncryptionAlg::RsaOaep); + metadata.validate().unwrap(); + } + + #[test] + fn validate_userinfo_encrypted_response() { + let mut metadata = valid_client_metadata(); + metadata.userinfo_encrypted_response_enc = Some(JsonWebEncryptionEnc::A128CbcHs256); + + // Err - No userinfo_encrypted_response_alg + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingEncryptionAlg(field)) => field + ); + 
assert_eq!(field, "userinfo"); + + // Ok - Has userinfo_encrypted_response_alg + metadata.userinfo_encrypted_response_alg = Some(JsonWebEncryptionAlg::RsaOaep); + metadata.validate().unwrap(); + } + + #[test] + fn validate_request_object_encryption() { + let mut metadata = valid_client_metadata(); + metadata.request_object_encryption_enc = Some(JsonWebEncryptionEnc::A128CbcHs256); + + // Err - No request_object_encryption_alg + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingEncryptionAlg(field)) => field + ); + assert_eq!(field, "request_object"); + + // Ok - Has request_object_encryption_alg + metadata.request_object_encryption_alg = Some(JsonWebEncryptionAlg::RsaOaep); + metadata.validate().unwrap(); + } + + #[test] + fn validate_initiate_login_uri() { + let mut metadata = valid_client_metadata(); + + // Err - Non-https URL + let initiate_uri = Url::parse("http://localhost/").unwrap(); + metadata.initiate_login_uri = Some(initiate_uri.clone()); + let (field, url) = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::UrlNonHttpsScheme(field, url)) => (field, url) + ); + assert_eq!(field, "initiate_login_uri"); + assert_eq!(url, initiate_uri); + + // Ok - https URL + metadata.initiate_login_uri = Some(Url::parse("https://localhost/").unwrap()); + metadata.validate().unwrap(); + } + + #[test] + fn validate_introspection_encrypted_response() { + let mut metadata = valid_client_metadata(); + metadata.introspection_encrypted_response_enc = Some(JsonWebEncryptionEnc::A128CbcHs256); + + // Err - No introspection_encrypted_response_alg + let field = assert_matches!( + metadata.clone().validate(), + Err(ClientMetadataVerificationError::MissingEncryptionAlg(field)) => field + ); + assert_eq!(field, "introspection"); + + // Ok - Has introspection_encrypted_response_alg + metadata.introspection_encrypted_response_alg = Some(JsonWebEncryptionAlg::RsaOaep); + 
metadata.validate().unwrap(); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/requests.rs b/matrix-authentication-service/crates/oauth2-types/src/requests.rs new file mode 100644 index 00000000..4c9f1117 --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/requests.rs @@ -0,0 +1,1060 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests and response types to interact with the [OAuth 2.0] specification. +//! +//! [OAuth 2.0]: https://oauth.net/2/ + +use std::{collections::HashSet, fmt, hash::Hash, num::NonZeroU32}; + +use chrono::{DateTime, Duration, Utc}; +use language_tags::LanguageTag; +use mas_iana::oauth::{OAuthAccessTokenType, OAuthTokenTypeHint}; +use serde::{Deserialize, Serialize}; +use serde_with::{ + DeserializeFromStr, DisplayFromStr, DurationSeconds, SerializeDisplay, StringWithSeparator, + TimestampSeconds, formats::SpaceSeparator, serde_as, skip_serializing_none, +}; +use url::Url; + +use crate::{response_type::ResponseType, scope::Scope}; + +// ref: https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml + +/// The mechanism to be used for returning Authorization Response parameters +/// from the Authorization Endpoint. +/// +/// Defined in [OAuth 2.0 Multiple Response Type Encoding Practices](https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#ResponseModes). +#[derive( + Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, SerializeDisplay, DeserializeFromStr, +)] +#[non_exhaustive] +pub enum ResponseMode { + /// Authorization Response parameters are encoded in the query string added + /// to the `redirect_uri`. + Query, + + /// Authorization Response parameters are encoded in the fragment added to + /// the `redirect_uri`. 
+    Fragment,
+
+    /// Authorization Response parameters are encoded as HTML form values that
+    /// are auto-submitted in the User Agent, and thus are transmitted via the
+    /// HTTP `POST` method to the Client, with the result parameters being
+    /// encoded in the body using the `application/x-www-form-urlencoded`
+    /// format.
+    ///
+    /// Defined in [OAuth 2.0 Form Post Response Mode](https://openid.net/specs/oauth-v2-form-post-response-mode-1_0.html).
+    FormPost,
+
+    /// An unknown value.
+    Unknown(String),
+}
+
+impl core::fmt::Display for ResponseMode {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            ResponseMode::Query => f.write_str("query"),
+            ResponseMode::Fragment => f.write_str("fragment"),
+            ResponseMode::FormPost => f.write_str("form_post"),
+            ResponseMode::Unknown(s) => f.write_str(s),
+        }
+    }
+}
+
+impl core::str::FromStr for ResponseMode {
+    type Err = core::convert::Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "query" => Ok(ResponseMode::Query),
+            "fragment" => Ok(ResponseMode::Fragment),
+            "form_post" => Ok(ResponseMode::FormPost),
+            s => Ok(ResponseMode::Unknown(s.to_owned())),
+        }
+    }
+}
+
+/// Value that specifies how the Authorization Server displays the
+/// authentication and consent user interface pages to the End-User.
+///
+/// Defined in [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest).
+#[derive(
+    Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, SerializeDisplay, DeserializeFromStr,
+)]
+#[non_exhaustive]
+#[derive(Default)]
+pub enum Display {
+    /// The Authorization Server should display the authentication and consent
+    /// UI consistent with a full User Agent page view.
+    ///
+    /// This is the default display mode.
+    #[default]
+    Page,
+
+    /// The Authorization Server should display the authentication and consent
+    /// UI consistent with a popup User Agent window.
+    Popup,
+
+    /// The Authorization Server should display the authentication and consent
+    /// UI consistent with a device that leverages a touch interface.
+    Touch,
+
+    /// The Authorization Server should display the authentication and consent
+    /// UI consistent with a "feature phone" type display.
+    Wap,
+
+    /// An unknown value.
+    Unknown(String),
+}
+
+impl core::fmt::Display for Display {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            Display::Page => f.write_str("page"),
+            Display::Popup => f.write_str("popup"),
+            Display::Touch => f.write_str("touch"),
+            Display::Wap => f.write_str("wap"),
+            Display::Unknown(s) => f.write_str(s),
+        }
+    }
+}
+
+impl core::str::FromStr for Display {
+    type Err = core::convert::Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "page" => Ok(Display::Page),
+            "popup" => Ok(Display::Popup),
+            "touch" => Ok(Display::Touch),
+            "wap" => Ok(Display::Wap),
+            s => Ok(Display::Unknown(s.to_owned())),
+        }
+    }
+}
+
+/// Value that specifies whether the Authorization Server prompts the End-User
+/// for reauthentication and consent.
+///
+/// Defined in [OpenID Connect Core 1.0](https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest).
+#[derive(
+    Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, SerializeDisplay, DeserializeFromStr,
+)]
+#[non_exhaustive]
+pub enum Prompt {
+    /// The Authorization Server must not display any authentication or consent
+    /// user interface pages.
+    None,
+
+    /// The Authorization Server should prompt the End-User for
+    /// reauthentication.
+    Login,
+
+    /// The Authorization Server should prompt the End-User for consent before
+    /// returning information to the Client.
+    Consent,
+
+    /// The Authorization Server should prompt the End-User to select a user
+    /// account.
+    ///
+    /// This enables an End-User who has multiple accounts at the Authorization
+    /// Server to select amongst the multiple accounts that they might have
+    /// current sessions for.
+    SelectAccount,
+
+    /// The Authorization Server should prompt the End-User to create a user
+    /// account.
+    ///
+    /// Defined in [Initiating User Registration via OpenID Connect](https://openid.net/specs/openid-connect-prompt-create-1_0.html).
+    Create,
+
+    /// An unknown value.
+    Unknown(String),
+}
+
+impl core::fmt::Display for Prompt {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            Prompt::None => f.write_str("none"),
+            Prompt::Login => f.write_str("login"),
+            Prompt::Consent => f.write_str("consent"),
+            Prompt::SelectAccount => f.write_str("select_account"),
+            Prompt::Create => f.write_str("create"),
+            Prompt::Unknown(s) => f.write_str(s),
+        }
+    }
+}
+
+impl core::str::FromStr for Prompt {
+    type Err = core::convert::Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "none" => Ok(Prompt::None),
+            "login" => Ok(Prompt::Login),
+            "consent" => Ok(Prompt::Consent),
+            "select_account" => Ok(Prompt::SelectAccount),
+            "create" => Ok(Prompt::Create),
+            s => Ok(Prompt::Unknown(s.to_owned())),
+        }
+    }
+}
+
+/// The body of a request to the [Authorization Endpoint].
+///
+/// [Authorization Endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1
+#[skip_serializing_none]
+#[serde_as]
+#[derive(Serialize, Deserialize, Clone)]
+pub struct AuthorizationRequest {
+    /// OAuth 2.0 Response Type value that determines the authorization
+    /// processing flow to be used.
+    pub response_type: ResponseType,
+
+    /// OAuth 2.0 Client Identifier valid at the Authorization Server.
+    pub client_id: String,
+
+    /// Redirection URI to which the response will be sent.
+    ///
+    /// This field is required when using a response type returning an
+    /// authorization code.
+    ///
+    /// This URI must have been pre-registered with the OpenID Provider.
+    pub redirect_uri: Option<Url>,
+
+    /// The scope of the access request.
+    ///
+    /// OpenID Connect requests must contain the `openid` scope value.
+    pub scope: Scope,
+
+    /// Opaque value used to maintain state between the request and the
+    /// callback.
+    pub state: Option<String>,
+
+    /// The mechanism to be used for returning parameters from the Authorization
+    /// Endpoint.
+    ///
+    /// This use of this parameter is not recommended when the Response Mode
+    /// that would be requested is the default mode specified for the Response
+    /// Type.
+    pub response_mode: Option<ResponseMode>,
+
+    /// String value used to associate a Client session with an ID Token, and to
+    /// mitigate replay attacks.
+    pub nonce: Option<String>,
+
+    /// How the Authorization Server should display the authentication and
+    /// consent user interface pages to the End-User.
+    pub display: Option<Display>,
+
+    /// Whether the Authorization Server should prompt the End-User for
+    /// reauthentication and consent.
+    ///
+    /// If [`Prompt::None`] is used, it must be the only value.
+    #[serde_as(as = "Option<StringWithSeparator<SpaceSeparator, Prompt>>")]
+    #[serde(default)]
+    pub prompt: Option<Vec<Prompt>>,
+
+    /// The allowable elapsed time in seconds since the last time the End-User
+    /// was actively authenticated by the OpenID Provider.
+    #[serde(default)]
+    #[serde_as(as = "Option<DisplayFromStr>")]
+    pub max_age: Option<NonZeroU32>,
+
+    /// End-User's preferred languages and scripts for the user interface.
+    #[serde_as(as = "Option<StringWithSeparator<SpaceSeparator, LanguageTag>>")]
+    #[serde(default)]
+    pub ui_locales: Option<Vec<LanguageTag>>,
+
+    /// ID Token previously issued by the Authorization Server being passed as a
+    /// hint about the End-User's current or past authenticated session with the
+    /// Client.
+    pub id_token_hint: Option<String>,
+
+    /// Hint to the Authorization Server about the login identifier the End-User
+    /// might use to log in.
+    pub login_hint: Option<String>,
+
+    /// Requested Authentication Context Class Reference values.
+    #[serde_as(as = "Option<StringWithSeparator<SpaceSeparator, String>>")]
+    #[serde(default)]
+    pub acr_values: Option<HashSet<String>>,
+
+    /// A JWT that contains the request's parameter values, called a [Request
+    /// Object].
+    ///
+    /// [Request Object]: https://openid.net/specs/openid-connect-core-1_0.html#RequestObject
+    pub request: Option<String>,
+
+    /// A URI referencing a [Request Object] or a [Pushed Authorization
+    /// Request].
+    ///
+    /// [Request Object]: https://openid.net/specs/openid-connect-core-1_0.html#RequestUriParameter
+    /// [Pushed Authorization Request]: https://datatracker.ietf.org/doc/html/rfc9126
+    pub request_uri: Option<Url>,
+
+    /// A JSON object containing the Client Metadata when interacting with a
+    /// [Self-Issued OpenID Provider].
+    ///
+    /// [Self-Issued OpenID Provider]: https://openid.net/specs/openid-connect-core-1_0.html#SelfIssued
+    pub registration: Option<serde_json::Value>,
+}
+
+impl AuthorizationRequest {
+    /// Creates a basic `AuthorizationRequest`.
+    #[must_use]
+    pub fn new(response_type: ResponseType, client_id: String, scope: Scope) -> Self {
+        Self {
+            response_type,
+            client_id,
+            redirect_uri: None,
+            scope,
+            state: None,
+            response_mode: None,
+            nonce: None,
+            display: None,
+            prompt: None,
+            max_age: None,
+            ui_locales: None,
+            id_token_hint: None,
+            login_hint: None,
+            acr_values: None,
+            request: None,
+            request_uri: None,
+            registration: None,
+        }
+    }
+}
+
+impl fmt::Debug for AuthorizationRequest {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AuthorizationRequest")
+            .field("response_type", &self.response_type)
+            .field("redirect_uri", &self.redirect_uri)
+            .field("scope", &self.scope)
+            .field("response_mode", &self.response_mode)
+            .field("display", &self.display)
+            .field("prompt", &self.prompt)
+            .field("max_age", &self.max_age)
+            .field("ui_locales", &self.ui_locales)
+            .field("login_hint", &self.login_hint)
+            .field("acr_values", &self.acr_values)
+            .field("request", &self.request)
+            .field("request_uri", &self.request_uri)
+            .field("registration",
&self.registration) + .finish_non_exhaustive() + } +} + +/// A successful response from the [Authorization Endpoint]. +/// +/// [Authorization Endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1 +#[skip_serializing_none] +#[serde_as] +#[derive(Serialize, Deserialize, Default, Clone)] +pub struct AuthorizationResponse { + /// The authorization code generated by the authorization server. + pub code: Option, + + /// The access token to access the requested scope. + pub access_token: Option, + + /// The type of the access token. + pub token_type: Option, + + /// ID Token value associated with the authenticated session. + pub id_token: Option, + + /// The duration for which the access token is valid. + #[serde_as(as = "Option>")] + pub expires_in: Option, +} + +impl fmt::Debug for AuthorizationResponse { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AuthorizationResponse") + .field("token_type", &self.token_type) + .field("id_token", &self.id_token) + .field("expires_in", &self.expires_in) + .finish_non_exhaustive() + } +} + +/// A request to the [Device Authorization Endpoint]. +/// +/// [Device Authorization Endpoint]: https://www.rfc-editor.org/rfc/rfc8628 +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +pub struct DeviceAuthorizationRequest { + /// The scope of the access request. + pub scope: Option, +} + +/// The default value of the `interval` between polling requests, if it is not +/// set. +pub const DEFAULT_DEVICE_AUTHORIZATION_INTERVAL: Duration = Duration::microseconds(5 * 1000 * 1000); + +/// A successful response from the [Device Authorization Endpoint]. +/// +/// [Device Authorization Endpoint]: https://www.rfc-editor.org/rfc/rfc8628 +#[serde_as] +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct DeviceAuthorizationResponse { + /// The device verification code. + pub device_code: String, + + /// The end-user verification code. 
+ pub user_code: String, + + /// The end-user verification URI on the authorization server. + /// + /// The URI should be short and easy to remember as end users will be asked + /// to manually type it into their user agent. + pub verification_uri: Url, + + /// A verification URI that includes the `user_code` (or other information + /// with the same function as the `user_code`), which is designed for + /// non-textual transmission. + pub verification_uri_complete: Option, + + /// The lifetime of the `device_code` and `user_code`. + #[serde_as(as = "DurationSeconds")] + pub expires_in: Duration, + + /// The minimum amount of time in seconds that the client should wait + /// between polling requests to the token endpoint. + /// + /// Defaults to [`DEFAULT_DEVICE_AUTHORIZATION_INTERVAL`]. + #[serde_as(as = "Option>")] + pub interval: Option, +} + +impl DeviceAuthorizationResponse { + /// The minimum amount of time in seconds that the client should wait + /// between polling requests to the token endpoint. + /// + /// Defaults to [`DEFAULT_DEVICE_AUTHORIZATION_INTERVAL`]. + #[must_use] + pub fn interval(&self) -> Duration { + self.interval + .unwrap_or(DEFAULT_DEVICE_AUTHORIZATION_INTERVAL) + } +} + +impl fmt::Debug for DeviceAuthorizationResponse { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("DeviceAuthorizationResponse") + .field("verification_uri", &self.verification_uri) + .field("expires_in", &self.expires_in) + .field("interval", &self.interval) + .finish_non_exhaustive() + } +} + +/// A request to the [Token Endpoint] for the [Authorization Code] grant type. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +/// [Authorization Code]: https://www.rfc-editor.org/rfc/rfc6749#section-4.1 +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct AuthorizationCodeGrant { + /// The authorization code that was returned from the authorization + /// endpoint. 
+ pub code: String, + + /// The `redirect_uri` that was included in the authorization request. + /// + /// This field must match exactly the value passed to the authorization + /// endpoint. + pub redirect_uri: Option, + + /// The code verifier that matches the code challenge that was sent to the + /// authorization endpoint. + // TODO: move this somehow in the pkce module + pub code_verifier: Option, +} + +impl fmt::Debug for AuthorizationCodeGrant { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AuthorizationCodeGrant") + .field("redirect_uri", &self.redirect_uri) + .finish_non_exhaustive() + } +} + +/// A request to the [Token Endpoint] for [refreshing an access token]. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +/// [refreshing an access token]: https://www.rfc-editor.org/rfc/rfc6749#section-6 +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct RefreshTokenGrant { + /// The refresh token issued to the client. + pub refresh_token: String, + + /// The scope of the access request. + /// + /// The requested scope must not include any scope not originally granted by + /// the resource owner, and if omitted is treated as equal to the scope + /// originally granted by the resource owner. + pub scope: Option, +} + +impl fmt::Debug for RefreshTokenGrant { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RefreshTokenGrant") + .field("scope", &self.scope) + .finish_non_exhaustive() + } +} + +/// A request to the [Token Endpoint] for the [Client Credentials] grant type. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +/// [Client Credentials]: https://www.rfc-editor.org/rfc/rfc6749#section-4.4 +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +pub struct ClientCredentialsGrant { + /// The scope of the access request. 
+ pub scope: Option, +} + +/// A request to the [Token Endpoint] for the [Device Authorization] grant type. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +/// [Device Authorization]: https://www.rfc-editor.org/rfc/rfc8628 +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct DeviceCodeGrant { + /// The device verification code, from the device authorization response. + pub device_code: String, +} + +impl fmt::Debug for DeviceCodeGrant { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("DeviceCodeGrant").finish_non_exhaustive() + } +} + +/// All possible values for the `grant_type` parameter. +#[derive( + Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, SerializeDisplay, DeserializeFromStr, +)] +pub enum GrantType { + /// [`authorization_code`](https://www.rfc-editor.org/rfc/rfc6749#section-4.1) + AuthorizationCode, + + /// [`refresh_token`](https://www.rfc-editor.org/rfc/rfc6749#section-6) + RefreshToken, + + /// [`implicit`](https://www.rfc-editor.org/rfc/rfc6749#section-4.2) + Implicit, + + /// [`client_credentials`](https://www.rfc-editor.org/rfc/rfc6749#section-4.4) + ClientCredentials, + + /// [`password`](https://www.rfc-editor.org/rfc/rfc6749#section-4.3) + Password, + + /// [`urn:ietf:params:oauth:grant-type:device_code`](https://www.rfc-editor.org/rfc/rfc8628) + DeviceCode, + + /// [`https://datatracker.ietf.org/doc/html/rfc7523#section-2.1`](https://www.rfc-editor.org/rfc/rfc7523#section-2.1) + JwtBearer, + + /// [`urn:openid:params:grant-type:ciba`](https://openid.net/specs/openid-client-initiated-backchannel-authentication-core-1_0.html) + ClientInitiatedBackchannelAuthentication, + + /// An unknown value. 
+ Unknown(String), +} + +impl core::fmt::Display for GrantType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + GrantType::AuthorizationCode => f.write_str("authorization_code"), + GrantType::RefreshToken => f.write_str("refresh_token"), + GrantType::Implicit => f.write_str("implicit"), + GrantType::ClientCredentials => f.write_str("client_credentials"), + GrantType::Password => f.write_str("password"), + GrantType::DeviceCode => f.write_str("urn:ietf:params:oauth:grant-type:device_code"), + GrantType::JwtBearer => f.write_str("urn:ietf:params:oauth:grant-type:jwt-bearer"), + GrantType::ClientInitiatedBackchannelAuthentication => { + f.write_str("urn:openid:params:grant-type:ciba") + } + GrantType::Unknown(s) => f.write_str(s), + } + } +} + +impl core::str::FromStr for GrantType { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "authorization_code" => Ok(GrantType::AuthorizationCode), + "refresh_token" => Ok(GrantType::RefreshToken), + "implicit" => Ok(GrantType::Implicit), + "client_credentials" => Ok(GrantType::ClientCredentials), + "password" => Ok(GrantType::Password), + "urn:ietf:params:oauth:grant-type:device_code" => Ok(GrantType::DeviceCode), + "urn:ietf:params:oauth:grant-type:jwt-bearer" => Ok(GrantType::JwtBearer), + "urn:openid:params:grant-type:ciba" => { + Ok(GrantType::ClientInitiatedBackchannelAuthentication) + } + s => Ok(GrantType::Unknown(s.to_owned())), + } + } +} + +/// An enum representing the possible requests to the [Token Endpoint]. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(tag = "grant_type", rename_all = "snake_case")] +#[non_exhaustive] +pub enum AccessTokenRequest { + /// A request in the Authorization Code flow. + AuthorizationCode(AuthorizationCodeGrant), + + /// A request to refresh an access token. 
+ RefreshToken(RefreshTokenGrant), + + /// A request in the Client Credentials flow. + ClientCredentials(ClientCredentialsGrant), + + /// A request in the Device Code flow. + #[serde(rename = "urn:ietf:params:oauth:grant-type:device_code")] + DeviceCode(DeviceCodeGrant), + + /// An unsupported request. + #[serde(skip_serializing, other)] + Unsupported, +} + +impl AccessTokenRequest { + /// Returns the string representation of the grant type of the request. + #[must_use] + pub fn grant_type(&self) -> &'static str { + match self { + Self::AuthorizationCode(_) => "authorization_code", + Self::RefreshToken(_) => "refresh_token", + Self::ClientCredentials(_) => "client_credentials", + Self::DeviceCode(_) => "urn:ietf:params:oauth:grant-type:device_code", + Self::Unsupported => "unsupported", + } + } +} + +/// A successful response from the [Token Endpoint]. +/// +/// [Token Endpoint]: https://www.rfc-editor.org/rfc/rfc6749#section-3.2 +#[serde_as] +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct AccessTokenResponse { + /// The access token to access the requested scope. + pub access_token: String, + + /// The token to refresh the access token when it expires. + pub refresh_token: Option, + + /// ID Token value associated with the authenticated session. + // TODO: this should be somewhere else + pub id_token: Option, + + /// The type of the access token. + pub token_type: OAuthAccessTokenType, + + /// The duration for which the access token is valid. + #[serde_as(as = "Option>")] + pub expires_in: Option, + + /// The scope of the access token. + pub scope: Option, +} + +impl AccessTokenResponse { + /// Creates a new `AccessTokenResponse` with the given access token. 
+ #[must_use] + pub fn new(access_token: String) -> AccessTokenResponse { + AccessTokenResponse { + access_token, + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + } + } + + /// Adds a refresh token to an `AccessTokenResponse`. + #[must_use] + pub fn with_refresh_token(mut self, refresh_token: String) -> Self { + self.refresh_token = Some(refresh_token); + self + } + + /// Adds an ID token to an `AccessTokenResponse`. + #[must_use] + pub fn with_id_token(mut self, id_token: String) -> Self { + self.id_token = Some(id_token); + self + } + + /// Adds a scope to an `AccessTokenResponse`. + #[must_use] + pub fn with_scope(mut self, scope: Scope) -> Self { + self.scope = Some(scope); + self + } + + /// Adds an expiration duration to an `AccessTokenResponse`. + #[must_use] + pub fn with_expires_in(mut self, expires_in: Duration) -> Self { + self.expires_in = Some(expires_in); + self + } +} + +impl fmt::Debug for AccessTokenResponse { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AccessTokenResponse") + .field("token_type", &self.token_type) + .field("expires_in", &self.expires_in) + .field("scope", &self.scope) + .finish_non_exhaustive() + } +} + +/// A request to the [Introspection Endpoint]. +/// +/// [Introspection Endpoint]: https://www.rfc-editor.org/rfc/rfc7662#section-2 +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct IntrospectionRequest { + /// The value of the token. + pub token: String, + + /// A hint about the type of the token submitted for introspection. + pub token_type_hint: Option, +} + +impl fmt::Debug for IntrospectionRequest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("IntrospectionRequest") + .field("token_type_hint", &self.token_type_hint) + .finish_non_exhaustive() + } +} + +/// A successful response from the [Introspection Endpoint]. 
+/// +/// [Introspection Endpoint]: https://www.rfc-editor.org/rfc/rfc7662#section-2 +#[serde_as] +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)] +pub struct IntrospectionResponse { + /// Whether or not the presented token is currently active. + pub active: bool, + + /// The scope associated with the token. + pub scope: Option, + + /// Client identifier for the OAuth 2.0 client that requested this token. + pub client_id: Option, + + /// Human-readable identifier for the resource owner who authorized this + /// token. + pub username: Option, + + /// Type of the token. + pub token_type: Option, + + /// Timestamp indicating when the token will expire. + #[serde_as(as = "Option")] + pub exp: Option>, + + /// Relative timestamp indicating when the token will expire, + /// in seconds from the current instant. + #[serde_as(as = "Option>")] + pub expires_in: Option, + + /// Timestamp indicating when the token was issued. + #[serde_as(as = "Option")] + pub iat: Option>, + + /// Timestamp indicating when the token is not to be used before. + #[serde_as(as = "Option")] + pub nbf: Option>, + + /// Subject of the token. + pub sub: Option, + + /// Intended audience of the token. + pub aud: Option, + + /// Issuer of the token. + pub iss: Option, + + /// String identifier for the token. + pub jti: Option, + + /// MAS extension: explicit device ID + /// Only used for compatibility access and refresh tokens. + pub device_id: Option, +} + +/// A request to the [Revocation Endpoint]. +/// +/// [Revocation Endpoint]: https://www.rfc-editor.org/rfc/rfc7009#section-2 +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct RevocationRequest { + /// The value of the token. + pub token: String, + + /// A hint about the type of the token submitted for introspection. 
+ pub token_type_hint: Option, +} + +impl fmt::Debug for RevocationRequest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RevocationRequest") + .field("token_type_hint", &self.token_type_hint) + .finish_non_exhaustive() + } +} + +/// A successful response from the [Pushed Authorization Request Endpoint]. +/// +/// Note that there is no request type because it is by definition the same as +/// [`AuthorizationRequest`]. +/// +/// [Pushed Authorization Request Endpoint]: https://datatracker.ietf.org/doc/html/rfc9126 +#[serde_as] +#[skip_serializing_none] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +pub struct PushedAuthorizationResponse { + /// The `request_uri` to use for the request to the authorization endpoint. + pub request_uri: String, + + /// The duration for which the request URI is valid. + #[serde_as(as = "DurationSeconds")] + pub expires_in: Duration, +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + use crate::{scope::OPENID, test_utils::assert_serde_json}; + + #[test] + fn serde_refresh_token_grant() { + let expected = json!({ + "grant_type": "refresh_token", + "refresh_token": "abcd", + "scope": "openid", + }); + + // TODO: insert multiple scopes and test it. It's a bit tricky to test since + // HashSet have no guarantees regarding the ordering of items, so right + // now the output is unstable. 
+ let scope: Option = Some(vec![OPENID].into_iter().collect()); + + let req = AccessTokenRequest::RefreshToken(RefreshTokenGrant { + refresh_token: "abcd".into(), + scope, + }); + + assert_serde_json(&req, expected); + } + + #[test] + fn serde_authorization_code_grant() { + let expected = json!({ + "grant_type": "authorization_code", + "code": "abcd", + "redirect_uri": "https://example.com/redirect", + }); + + let req = AccessTokenRequest::AuthorizationCode(AuthorizationCodeGrant { + code: "abcd".into(), + redirect_uri: Some("https://example.com/redirect".parse().unwrap()), + code_verifier: None, + }); + + assert_serde_json(&req, expected); + } + + #[test] + fn serialize_grant_type() { + assert_eq!( + serde_json::to_string(&GrantType::AuthorizationCode).unwrap(), + "\"authorization_code\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::RefreshToken).unwrap(), + "\"refresh_token\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::Implicit).unwrap(), + "\"implicit\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::ClientCredentials).unwrap(), + "\"client_credentials\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::Password).unwrap(), + "\"password\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::DeviceCode).unwrap(), + "\"urn:ietf:params:oauth:grant-type:device_code\"" + ); + assert_eq!( + serde_json::to_string(&GrantType::ClientInitiatedBackchannelAuthentication).unwrap(), + "\"urn:openid:params:grant-type:ciba\"" + ); + } + + #[test] + fn deserialize_grant_type() { + assert_eq!( + serde_json::from_str::("\"authorization_code\"").unwrap(), + GrantType::AuthorizationCode + ); + assert_eq!( + serde_json::from_str::("\"refresh_token\"").unwrap(), + GrantType::RefreshToken + ); + assert_eq!( + serde_json::from_str::("\"implicit\"").unwrap(), + GrantType::Implicit + ); + assert_eq!( + serde_json::from_str::("\"client_credentials\"").unwrap(), + GrantType::ClientCredentials + ); + assert_eq!( + 
serde_json::from_str::("\"password\"").unwrap(), + GrantType::Password + ); + assert_eq!( + serde_json::from_str::("\"urn:ietf:params:oauth:grant-type:device_code\"") + .unwrap(), + GrantType::DeviceCode + ); + assert_eq!( + serde_json::from_str::("\"urn:openid:params:grant-type:ciba\"").unwrap(), + GrantType::ClientInitiatedBackchannelAuthentication + ); + } + + #[test] + fn serialize_response_mode() { + assert_eq!( + serde_json::to_string(&ResponseMode::Query).unwrap(), + "\"query\"" + ); + assert_eq!( + serde_json::to_string(&ResponseMode::Fragment).unwrap(), + "\"fragment\"" + ); + assert_eq!( + serde_json::to_string(&ResponseMode::FormPost).unwrap(), + "\"form_post\"" + ); + } + + #[test] + fn deserialize_response_mode() { + assert_eq!( + serde_json::from_str::("\"query\"").unwrap(), + ResponseMode::Query + ); + assert_eq!( + serde_json::from_str::("\"fragment\"").unwrap(), + ResponseMode::Fragment + ); + assert_eq!( + serde_json::from_str::("\"form_post\"").unwrap(), + ResponseMode::FormPost + ); + } + + #[test] + fn serialize_display() { + assert_eq!(serde_json::to_string(&Display::Page).unwrap(), "\"page\""); + assert_eq!(serde_json::to_string(&Display::Popup).unwrap(), "\"popup\""); + assert_eq!(serde_json::to_string(&Display::Touch).unwrap(), "\"touch\""); + assert_eq!(serde_json::to_string(&Display::Wap).unwrap(), "\"wap\""); + } + + #[test] + fn deserialize_display() { + assert_eq!( + serde_json::from_str::("\"page\"").unwrap(), + Display::Page + ); + assert_eq!( + serde_json::from_str::("\"popup\"").unwrap(), + Display::Popup + ); + assert_eq!( + serde_json::from_str::("\"touch\"").unwrap(), + Display::Touch + ); + assert_eq!( + serde_json::from_str::("\"wap\"").unwrap(), + Display::Wap + ); + } + + #[test] + fn serialize_prompt() { + assert_eq!(serde_json::to_string(&Prompt::None).unwrap(), "\"none\""); + assert_eq!(serde_json::to_string(&Prompt::Login).unwrap(), "\"login\""); + assert_eq!( + serde_json::to_string(&Prompt::Consent).unwrap(), + 
"\"consent\"" + ); + assert_eq!( + serde_json::to_string(&Prompt::SelectAccount).unwrap(), + "\"select_account\"" + ); + assert_eq!( + serde_json::to_string(&Prompt::Create).unwrap(), + "\"create\"" + ); + } + + #[test] + fn deserialize_prompt() { + assert_eq!( + serde_json::from_str::("\"none\"").unwrap(), + Prompt::None + ); + assert_eq!( + serde_json::from_str::("\"login\"").unwrap(), + Prompt::Login + ); + assert_eq!( + serde_json::from_str::("\"consent\"").unwrap(), + Prompt::Consent + ); + assert_eq!( + serde_json::from_str::("\"select_account\"").unwrap(), + Prompt::SelectAccount + ); + assert_eq!( + serde_json::from_str::("\"create\"").unwrap(), + Prompt::Create + ); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/response_type.rs b/matrix-authentication-service/crates/oauth2-types/src/response_type.rs new file mode 100644 index 00000000..1f3322e9 --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/response_type.rs @@ -0,0 +1,505 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! [Response types] in the OpenID Connect specification. +//! +//! [Response types]: https://openid.net/specs/openid-connect-core-1_0.html#Authentication + +#![allow(clippy::module_name_repetitions)] + +use std::{collections::BTreeSet, fmt, iter::FromIterator, str::FromStr}; + +use mas_iana::oauth::OAuthAuthorizationEndpointResponseType; +use serde_with::{DeserializeFromStr, SerializeDisplay}; +use thiserror::Error; + +/// An error encountered when trying to parse an invalid [`ResponseType`]. +#[derive(Debug, Error, Clone, PartialEq, Eq)] +#[error("invalid response type")] +pub struct InvalidResponseType; + +/// The accepted tokens in a [`ResponseType`]. 
+/// +/// `none` is not in this enum because it is represented by an empty +/// [`ResponseType`]. +/// +/// This type also accepts unknown tokens that can be constructed via it's +/// `FromStr` implementation or used via its `Display` implementation. +#[derive( + Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, SerializeDisplay, DeserializeFromStr, +)] +#[non_exhaustive] +pub enum ResponseTypeToken { + /// `code` + Code, + + /// `id_token` + IdToken, + + /// `token` + Token, + + /// Unknown token. + Unknown(String), +} + +impl core::fmt::Display for ResponseTypeToken { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + ResponseTypeToken::Code => f.write_str("code"), + ResponseTypeToken::IdToken => f.write_str("id_token"), + ResponseTypeToken::Token => f.write_str("token"), + ResponseTypeToken::Unknown(s) => f.write_str(s), + } + } +} + +impl core::str::FromStr for ResponseTypeToken { + type Err = core::convert::Infallible; + + fn from_str(s: &str) -> Result { + match s { + "code" => Ok(Self::Code), + "id_token" => Ok(Self::IdToken), + "token" => Ok(Self::Token), + s => Ok(Self::Unknown(s.to_owned())), + } + } +} + +/// An [OAuth 2.0 `response_type` value] that the client can use +/// at the [authorization endpoint]. +/// +/// It is recommended to construct this type from an +/// [`OAuthAuthorizationEndpointResponseType`]. +/// +/// [OAuth 2.0 `response_type` value]: https://www.rfc-editor.org/rfc/rfc7591#page-9 +/// [authorization endpoint]: https://www.rfc-editor.org/rfc/rfc6749.html#section-3.1 +#[derive(Debug, Clone, PartialEq, Eq, SerializeDisplay, DeserializeFromStr, PartialOrd, Ord)] +pub struct ResponseType(BTreeSet); + +impl std::ops::Deref for ResponseType { + type Target = BTreeSet; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl ResponseType { + /// Whether this response type requests a code. 
+ #[must_use] + pub fn has_code(&self) -> bool { + self.0.contains(&ResponseTypeToken::Code) + } + + /// Whether this response type requests an ID token. + #[must_use] + pub fn has_id_token(&self) -> bool { + self.0.contains(&ResponseTypeToken::IdToken) + } + + /// Whether this response type requests a token. + #[must_use] + pub fn has_token(&self) -> bool { + self.0.contains(&ResponseTypeToken::Token) + } +} + +impl FromStr for ResponseType { + type Err = InvalidResponseType; + + fn from_str(s: &str) -> Result { + let s = s.trim(); + + if s.is_empty() { + Err(InvalidResponseType) + } else if s == "none" { + Ok(Self(BTreeSet::new())) + } else { + s.split_ascii_whitespace() + .map(|t| ResponseTypeToken::from_str(t).or(Err(InvalidResponseType))) + .collect::>() + } + } +} + +impl fmt::Display for ResponseType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut iter = self.iter(); + + // First item shouldn't have a leading space + if let Some(first) = iter.next() { + first.fmt(f)?; + } else { + // If the whole iterator is empty, write 'none' instead + write!(f, "none")?; + return Ok(()); + } + + // Write the other items with a leading space + for item in iter { + write!(f, " {item}")?; + } + + Ok(()) + } +} + +impl FromIterator for ResponseType { + fn from_iter>(iter: T) -> Self { + Self(BTreeSet::from_iter(iter)) + } +} + +impl From for ResponseType { + fn from(response_type: OAuthAuthorizationEndpointResponseType) -> Self { + match response_type { + OAuthAuthorizationEndpointResponseType::Code => Self([ResponseTypeToken::Code].into()), + OAuthAuthorizationEndpointResponseType::CodeIdToken => { + Self([ResponseTypeToken::Code, ResponseTypeToken::IdToken].into()) + } + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken => Self( + [ + ResponseTypeToken::Code, + ResponseTypeToken::IdToken, + ResponseTypeToken::Token, + ] + .into(), + ), + OAuthAuthorizationEndpointResponseType::CodeToken => { + Self([ResponseTypeToken::Code, 
ResponseTypeToken::Token].into()) + } + OAuthAuthorizationEndpointResponseType::IdToken => { + Self([ResponseTypeToken::IdToken].into()) + } + OAuthAuthorizationEndpointResponseType::IdTokenToken => { + Self([ResponseTypeToken::IdToken, ResponseTypeToken::Token].into()) + } + OAuthAuthorizationEndpointResponseType::None => Self(BTreeSet::new()), + OAuthAuthorizationEndpointResponseType::Token => { + Self([ResponseTypeToken::Token].into()) + } + } + } +} + +impl TryFrom for OAuthAuthorizationEndpointResponseType { + type Error = InvalidResponseType; + + fn try_from(response_type: ResponseType) -> Result { + if response_type + .iter() + .any(|t| matches!(t, ResponseTypeToken::Unknown(_))) + { + return Err(InvalidResponseType); + } + + let tokens = response_type.iter().collect::>(); + let res = match *tokens { + [ResponseTypeToken::Code] => OAuthAuthorizationEndpointResponseType::Code, + [ResponseTypeToken::IdToken] => OAuthAuthorizationEndpointResponseType::IdToken, + [ResponseTypeToken::Token] => OAuthAuthorizationEndpointResponseType::Token, + [ResponseTypeToken::Code, ResponseTypeToken::IdToken] => { + OAuthAuthorizationEndpointResponseType::CodeIdToken + } + [ResponseTypeToken::Code, ResponseTypeToken::Token] => { + OAuthAuthorizationEndpointResponseType::CodeToken + } + [ResponseTypeToken::IdToken, ResponseTypeToken::Token] => { + OAuthAuthorizationEndpointResponseType::IdTokenToken + } + [ + ResponseTypeToken::Code, + ResponseTypeToken::IdToken, + ResponseTypeToken::Token, + ] => OAuthAuthorizationEndpointResponseType::CodeIdTokenToken, + _ => OAuthAuthorizationEndpointResponseType::None, + }; + + Ok(res) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_response_type_token() { + assert_eq!( + serde_json::from_str::("\"code\"").unwrap(), + ResponseTypeToken::Code + ); + assert_eq!( + serde_json::from_str::("\"id_token\"").unwrap(), + ResponseTypeToken::IdToken + ); + assert_eq!( + serde_json::from_str::("\"token\"").unwrap(), + 
ResponseTypeToken::Token + ); + assert_eq!( + serde_json::from_str::("\"something_unsupported\"").unwrap(), + ResponseTypeToken::Unknown("something_unsupported".to_owned()) + ); + } + + #[test] + fn serialize_response_type_token() { + assert_eq!( + serde_json::to_string(&ResponseTypeToken::Code).unwrap(), + "\"code\"" + ); + assert_eq!( + serde_json::to_string(&ResponseTypeToken::IdToken).unwrap(), + "\"id_token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseTypeToken::Token).unwrap(), + "\"token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseTypeToken::Unknown( + "something_unsupported".to_owned() + )) + .unwrap(), + "\"something_unsupported\"" + ); + } + + #[test] + fn deserialize_response_type() { + serde_json::from_str::("\"\"").unwrap_err(); + + let res_type = serde_json::from_str::("\"none\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::None + ); + + let res_type = serde_json::from_str::("\"code\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::Code + ); + + let res_type = serde_json::from_str::("\"code\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::Code + ); + + let res_type = serde_json::from_str::("\"id_token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + 
OAuthAuthorizationEndpointResponseType::IdToken + ); + + let res_type = serde_json::from_str::("\"token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::Token + ); + + let res_type = serde_json::from_str::("\"something_unsupported\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!( + iter.next(), + Some(&ResponseTypeToken::Unknown( + "something_unsupported".to_owned() + )) + ); + assert_eq!(iter.next(), None); + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap_err(); + + let res_type = serde_json::from_str::("\"code id_token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::CodeIdToken + ); + + let res_type = serde_json::from_str::("\"code token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::CodeToken + ); + + let res_type = serde_json::from_str::("\"id_token token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::IdTokenToken + ); + + let res_type = serde_json::from_str::("\"code id_token 
token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken + ); + + let res_type = + serde_json::from_str::("\"code id_token token something_unsupported\"") + .unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!( + iter.next(), + Some(&ResponseTypeToken::Unknown( + "something_unsupported".to_owned() + )) + ); + assert_eq!(iter.next(), None); + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap_err(); + + // Order doesn't matter + let res_type = serde_json::from_str::("\"token code id_token\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken + ); + + let res_type = + serde_json::from_str::("\"id_token token id_token code\"").unwrap(); + let mut iter = res_type.iter(); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Code)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::IdToken)); + assert_eq!(iter.next(), Some(&ResponseTypeToken::Token)); + assert_eq!(iter.next(), None); + assert_eq!( + OAuthAuthorizationEndpointResponseType::try_from(res_type).unwrap(), + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken + ); + } + + #[test] + fn serialize_response_type() { + 
assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::None + )) + .unwrap(), + "\"none\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::Code + )) + .unwrap(), + "\"code\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::IdToken + )) + .unwrap(), + "\"id_token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::CodeIdToken + )) + .unwrap(), + "\"code id_token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::CodeToken + )) + .unwrap(), + "\"code token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::IdTokenToken + )) + .unwrap(), + "\"id_token token\"" + ); + assert_eq!( + serde_json::to_string(&ResponseType::from( + OAuthAuthorizationEndpointResponseType::CodeIdTokenToken + )) + .unwrap(), + "\"code id_token token\"" + ); + + assert_eq!( + serde_json::to_string( + &[ + ResponseTypeToken::Unknown("something_unsupported".to_owned()), + ResponseTypeToken::Code + ] + .into_iter() + .collect::() + ) + .unwrap(), + "\"code something_unsupported\"" + ); + + // Order doesn't matter. 
+ let res = [ + ResponseTypeToken::IdToken, + ResponseTypeToken::Token, + ResponseTypeToken::Code, + ] + .into_iter() + .collect::(); + assert_eq!( + serde_json::to_string(&res).unwrap(), + "\"code id_token token\"" + ); + + let res = [ + ResponseTypeToken::Code, + ResponseTypeToken::Token, + ResponseTypeToken::IdToken, + ] + .into_iter() + .collect::(); + assert_eq!( + serde_json::to_string(&res).unwrap(), + "\"code id_token token\"" + ); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/scope.rs b/matrix-authentication-service/crates/oauth2-types/src/scope.rs new file mode 100644 index 00000000..f9832b5c --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/scope.rs @@ -0,0 +1,266 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types to define an [access token's scope]. +//! +//! [access token's scope]: https://www.rfc-editor.org/rfc/rfc6749#section-3.3 + +#![allow(clippy::module_name_repetitions)] + +use std::{ + borrow::Cow, + collections::BTreeSet, + iter::FromIterator, + ops::{Deref, DerefMut}, + str::FromStr, +}; + +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +/// The error type returned when a scope is invalid. +#[derive(Debug, Error, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[error("Invalid scope format")] +pub struct InvalidScope; + +/// A scope token or scope value. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ScopeToken(Cow<'static, str>); + +impl ScopeToken { + /// Create a `ScopeToken` from a static string. The validity of it is not + /// checked since it has to be valid in const contexts + #[must_use] + pub const fn from_static(token: &'static str) -> Self { + Self(Cow::Borrowed(token)) + } + + /// Get the scope token as a string slice. 
+ #[must_use] + pub fn as_str(&self) -> &str { + self.0.as_ref() + } +} + +/// `openid`. +/// +/// Must be included in OpenID Connect requests. +pub const OPENID: ScopeToken = ScopeToken::from_static("openid"); + +/// `profile`. +/// +/// Requests access to the End-User's default profile Claims. +pub const PROFILE: ScopeToken = ScopeToken::from_static("profile"); + +/// `email`. +/// +/// Requests access to the `email` and `email_verified` Claims. +pub const EMAIL: ScopeToken = ScopeToken::from_static("email"); + +/// `address`. +/// +/// Requests access to the `address` Claim. +pub const ADDRESS: ScopeToken = ScopeToken::from_static("address"); + +/// `phone`. +/// +/// Requests access to the `phone_number` and `phone_number_verified` Claims. +pub const PHONE: ScopeToken = ScopeToken::from_static("phone"); + +/// `offline_access`. +/// +/// Requests that an OAuth 2.0 Refresh Token be issued that can be used to +/// obtain an Access Token that grants access to the End-User's Userinfo +/// Endpoint even when the End-User is not present (not logged in). 
+pub const OFFLINE_ACCESS: ScopeToken = ScopeToken::from_static("offline_access"); + +// As per RFC6749 appendix A: +// https://datatracker.ietf.org/doc/html/rfc6749#appendix-A +// +// NQCHAR = %x21 / %x23-5B / %x5D-7E +fn nqchar(c: char) -> bool { + '\x21' == c || ('\x23'..'\x5B').contains(&c) || ('\x5D'..'\x7E').contains(&c) +} + +impl FromStr for ScopeToken { + type Err = InvalidScope; + + fn from_str(s: &str) -> Result { + // As per RFC6749 appendix A.4: + // https://datatracker.ietf.org/doc/html/rfc6749#appendix-A.4 + // + // scope-token = 1*NQCHAR + if !s.is_empty() && s.chars().all(nqchar) { + Ok(ScopeToken(Cow::Owned(s.into()))) + } else { + Err(InvalidScope) + } + } +} + +impl Deref for ScopeToken { + type Target = str; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl std::fmt::Display for ScopeToken { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +/// A scope. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Scope(BTreeSet); + +impl Deref for Scope { + type Target = BTreeSet; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Scope { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl FromStr for Scope { + type Err = InvalidScope; + + fn from_str(s: &str) -> Result { + // As per RFC6749 appendix A.4: + // https://datatracker.ietf.org/doc/html/rfc6749#appendix-A.4 + // + // scope = scope-token *( SP scope-token ) + let scopes: Result, InvalidScope> = + s.split(' ').map(ScopeToken::from_str).collect(); + + Ok(Self(scopes?)) + } +} + +impl Scope { + /// Whether this `Scope` is empty. + #[must_use] + pub fn is_empty(&self) -> bool { + // This should never be the case? + self.0.is_empty() + } + + /// The number of tokens in the `Scope`. + #[must_use] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Whether this `Scope` contains the given value. 
+ #[must_use] + pub fn contains(&self, token: &str) -> bool { + ScopeToken::from_str(token) + .map(|token| self.0.contains(&token)) + .unwrap_or(false) + } + + /// Inserts the given token in this `Scope`. + /// + /// Returns whether the token was newly inserted. + pub fn insert(&mut self, value: ScopeToken) -> bool { + self.0.insert(value) + } +} + +impl std::fmt::Display for Scope { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for (index, token) in self.0.iter().enumerate() { + if index == 0 { + write!(f, "{token}")?; + } else { + write!(f, " {token}")?; + } + } + + Ok(()) + } +} + +impl Serialize for Scope { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.to_string().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for Scope { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + // FIXME: seems like there is an unnecessary clone here? + let scope: String = Deserialize::deserialize(deserializer)?; + Scope::from_str(&scope).map_err(serde::de::Error::custom) + } +} + +impl FromIterator for Scope { + fn from_iter>(iter: T) -> Self { + Self(BTreeSet::from_iter(iter)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_scope_token() { + assert_eq!(ScopeToken::from_str("openid"), Ok(OPENID)); + + assert_eq!(ScopeToken::from_str("invalid\\scope"), Err(InvalidScope)); + } + + #[test] + fn parse_scope() { + let scope = Scope::from_str("openid profile address").unwrap(); + assert_eq!(scope.len(), 3); + assert!(scope.contains("openid")); + assert!(scope.contains("profile")); + assert!(scope.contains("address")); + assert!(!scope.contains("unknown")); + + assert!( + Scope::from_str("").is_err(), + "there should always be at least one token in the scope" + ); + + assert!(Scope::from_str("invalid\\scope").is_err()); + assert!(Scope::from_str("no double space").is_err()); + assert!(Scope::from_str(" no leading space").is_err()); + 
assert!(Scope::from_str("no trailing space ").is_err()); + + let scope = Scope::from_str("openid").unwrap(); + assert_eq!(scope.len(), 1); + assert!(scope.contains("openid")); + assert!(!scope.contains("profile")); + assert!(!scope.contains("address")); + + assert_eq!( + Scope::from_str("order does not matter"), + Scope::from_str("matter not order does"), + ); + + assert!(Scope::from_str("http://example.com").is_ok()); + assert!(Scope::from_str("urn:matrix:client:api:*").is_ok()); + assert!(Scope::from_str("urn:matrix:org.matrix.msc2967.client:api:*").is_ok()); + } +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/test_utils.rs b/matrix-authentication-service/crates/oauth2-types/src/test_utils.rs new file mode 100644 index 00000000..69e58f9e --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/test_utils.rs @@ -0,0 +1,22 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::fmt::Debug; + +use serde::{Serialize, de::DeserializeOwned}; + +#[track_caller] +pub(crate) fn assert_serde_json( + got: &T, + expected_value: serde_json::Value, +) { + let got_value = serde_json::to_value(got).expect("could not serialize object as JSON value"); + assert_eq!(got_value, expected_value); + + let expected: T = serde_json::from_value(expected_value) + .expect("could not deserialize object from JSON value"); + assert_eq!(got, &expected); +} diff --git a/matrix-authentication-service/crates/oauth2-types/src/webfinger.rs b/matrix-authentication-service/crates/oauth2-types/src/webfinger.rs new file mode 100644 index 00000000..34e13232 --- /dev/null +++ b/matrix-authentication-service/crates/oauth2-types/src/webfinger.rs @@ -0,0 +1,92 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types for provider discovery using [Webfinger]. +//! +//! [Webfinger]: https://www.rfc-editor.org/rfc/rfc7033 + +use serde::{Deserialize, Serialize}; +use url::Url; + +/// The response of the Webfinger endpoint. +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct WebFingerResponse { + /// A URI that identifies the entity described by the response. + subject: String, + + /// Links that describe the subject. + links: Vec, +} + +impl WebFingerResponse { + /// Creates a new `WebFingerResponse` with the given subject. + #[must_use] + pub const fn new(subject: String) -> Self { + Self { + subject, + links: Vec::new(), + } + } + + /// Adds the given link to this `WebFingerResponse`. + #[must_use] + pub fn with_link(mut self, link: WebFingerLink) -> Self { + self.links.push(link); + self + } + + /// Adds the given issuer to this `WebFingerResponse`. + #[must_use] + pub fn with_issuer(self, issuer: Url) -> Self { + self.with_link(WebFingerLink::issuer(issuer)) + } +} + +/// A link in a Webfinger response. +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] +#[serde(tag = "rel")] +pub enum WebFingerLink { + /// An OpenID Connect issuer. + #[serde(rename = "http://openid.net/specs/connect/1.0/issuer")] + OidcIssuer { + /// The URL of the issuer. + href: Url, + }, +} + +impl WebFingerLink { + /// Creates a new `WebFingerLink` for an OpenID Connect issuer. 
+ #[must_use] + pub const fn issuer(href: Url) -> Self { + Self::OidcIssuer { href } + } +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + + #[test] + fn serialize_webfinger_response_test() { + let res = WebFingerResponse::new("acct:john@example.com".to_owned()) + .with_issuer(Url::parse("https://account.example.com/").unwrap()); + + let res = serde_json::to_value(res).unwrap(); + + assert_eq!( + res, + json!({ + "subject": "acct:john@example.com", + "links": [{ + "rel": "http://openid.net/specs/connect/1.0/issuer", + "href": "https://account.example.com/", + }] + }) + ); + } +} diff --git a/matrix-authentication-service/crates/oidc-client/Cargo.toml b/matrix-authentication-service/crates/oidc-client/Cargo.toml new file mode 100644 index 00000000..3fffd584 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/Cargo.toml @@ -0,0 +1,53 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-oidc-client" +description = "OpenID Connect client library used by the Matrix Authentication Service" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +async-trait.workspace = true +base64ct.workspace = true +chrono.workspace = true +elliptic-curve.workspace = true +form_urlencoded.workspace = true +headers.workspace = true +http.workspace = true +language-tags.workspace = true +mime.workspace = true +p256.workspace = true +rand.workspace = true +reqwest.workspace = true +serde_json.workspace = true +serde_urlencoded.workspace = true +serde.workspace = true +thiserror.workspace = true +tracing.workspace = true +url.workspace = true + +mas-http.workspace = true +mas-iana.workspace = true +mas-jose.workspace = true +mas-keystore.workspace = true +oauth2-types.workspace = true + +[dev-dependencies] +assert_matches.workspace = true +bitflags.workspace = true +http-body-util.workspace = true +rand_chacha.workspace = true +rustls.workspace = true +tokio.workspace = true +wiremock.workspace = true diff --git a/matrix-authentication-service/crates/oidc-client/src/error.rs b/matrix-authentication-service/crates/oidc-client/src/error.rs new file mode 100644 index 00000000..0f424464 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/error.rs @@ -0,0 +1,312 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! The error types used in this crate. 
+ +use async_trait::async_trait; +use mas_jose::{ + claims::ClaimError, + jwa::InvalidAlgorithm, + jwt::{JwtDecodeError, JwtSignatureError, NoKeyWorked}, +}; +use oauth2_types::{oidc::ProviderMetadataVerificationError, pkce::CodeChallengeError}; +use serde::Deserialize; +use thiserror::Error; + +/// All possible errors when using this crate. +#[derive(Debug, Error)] +#[error(transparent)] +pub enum Error { + /// An error occurred fetching provider metadata. + Discovery(#[from] DiscoveryError), + + /// An error occurred fetching the provider JWKS. + Jwks(#[from] JwksError), + + /// An error occurred building the authorization URL. + Authorization(#[from] AuthorizationError), + + /// An error occurred exchanging an authorization code for an access token. + TokenAuthorizationCode(#[from] TokenAuthorizationCodeError), + + /// An error occurred requesting an access token with client credentials. + TokenClientCredentials(#[from] TokenRequestError), + + /// An error occurred refreshing an access token. + TokenRefresh(#[from] TokenRefreshError), + + /// An error occurred requesting user info. + UserInfo(#[from] UserInfoError), +} + +/// All possible errors when fetching provider metadata. +#[derive(Debug, Error)] +#[error("Fetching provider metadata failed")] +pub enum DiscoveryError { + /// An error occurred building the request's URL. + IntoUrl(#[from] url::ParseError), + + /// The server returned an HTTP error status code. + Http(#[from] reqwest::Error), + + /// An error occurred validating the metadata. + Validation(#[from] ProviderMetadataVerificationError), + + /// The provider doesn't have an issuer set, which is required if discovery + /// is enabled. + #[error("Provider doesn't have an issuer set")] + MissingIssuer, + + /// Discovery is disabled for this provider. + #[error("Discovery is disabled for this provider")] + Disabled, +} + +/// All possible errors when authorizing the client. 
+#[derive(Debug, Error)] +#[error("Building the authorization URL failed")] +pub enum AuthorizationError { + /// An error occurred constructing the PKCE code challenge. + Pkce(#[from] CodeChallengeError), + + /// An error occurred serializing the request. + UrlEncoded(#[from] serde_urlencoded::ser::Error), +} + +/// All possible errors when requesting an access token. +#[derive(Debug, Error)] +#[error("Request to the token endpoint failed")] +pub enum TokenRequestError { + /// The HTTP client returned an error. + Http(#[from] reqwest::Error), + + /// The server returned an error + OAuth2(#[from] OAuth2Error), + + /// Error while injecting the client credentials into the request. + Credentials(#[from] CredentialsError), +} + +/// All possible errors when exchanging a code for an access token. +#[derive(Debug, Error)] +pub enum TokenAuthorizationCodeError { + /// An error occurred requesting the access token. + #[error(transparent)] + Token(#[from] TokenRequestError), + + /// An error occurred validating the ID Token. + #[error("Verifying the 'id_token' returned by the provider failed")] + IdToken(#[from] IdTokenError), +} + +/// All possible errors when refreshing an access token. +#[derive(Debug, Error)] +pub enum TokenRefreshError { + /// An error occurred requesting the access token. + #[error(transparent)] + Token(#[from] TokenRequestError), + + /// An error occurred validating the ID Token. + #[error("Verifying the 'id_token' returned by the provider failed")] + IdToken(#[from] IdTokenError), +} + +/// All possible errors when requesting user info. +#[derive(Debug, Error)] +pub enum UserInfoError { + /// The content-type header is missing from the response. + #[error("missing response content-type")] + MissingResponseContentType, + + /// The content-type is not valid. + #[error("invalid response content-type")] + InvalidResponseContentTypeValue, + + /// The content-type is not the one that was expected. 
+ #[error("unexpected response content-type {got:?}, expected {expected:?}")] + UnexpectedResponseContentType { + /// The expected content-type. + expected: String, + /// The returned content-type. + got: String, + }, + + /// An error occurred verifying the Id Token. + #[error("Verifying the 'id_token' returned by the provider failed")] + IdToken(#[from] IdTokenError), + + /// An error occurred sending the request. + #[error(transparent)] + Http(#[from] reqwest::Error), + + /// The server returned an error + #[error(transparent)] + OAuth2(#[from] OAuth2Error), +} + +/// All possible errors when requesting a JWKS. +#[derive(Debug, Error)] +#[error("Failed to fetch JWKS")] +pub enum JwksError { + /// An error occurred sending the request. + Http(#[from] reqwest::Error), +} + +/// All possible errors when verifying a JWT. +#[derive(Debug, Error)] +pub enum JwtVerificationError { + /// An error occured decoding the JWT. + #[error(transparent)] + JwtDecode(#[from] JwtDecodeError), + + /// No key worked for verifying the JWT's signature. + #[error(transparent)] + JwtSignature(#[from] NoKeyWorked), + + /// An error occurred extracting a claim. + #[error(transparent)] + Claim(#[from] ClaimError), + + /// The algorithm used for signing the JWT is not the one that was + /// requested. + #[error("wrong signature alg")] + WrongSignatureAlg, +} + +/// All possible errors when verifying an ID token. +#[derive(Debug, Error)] +pub enum IdTokenError { + /// No ID Token was found in the response although one was expected. + #[error("ID token is missing")] + MissingIdToken, + + /// The ID Token from the latest Authorization was not provided although + /// this request expects to be verified against one. + #[error("Authorization ID token is missing")] + MissingAuthIdToken, + + #[error(transparent)] + /// An error occurred validating the ID Token's signature and basic claims. + Jwt(#[from] JwtVerificationError), + + #[error(transparent)] + /// An error occurred extracting a claim. 
+ Claim(#[from] ClaimError), + + /// The subject identifier returned by the issuer is not the same as the one + /// we got before. + #[error("wrong subject identifier")] + WrongSubjectIdentifier, + + /// The authentication time returned by the issuer is not the same as the + /// one we got before. + #[error("wrong authentication time")] + WrongAuthTime, +} + +/// All errors that can occur when adding client credentials to the request. +#[derive(Debug, Error)] +pub enum CredentialsError { + /// Trying to use an unsupported authentication method. + #[error("unsupported authentication method")] + UnsupportedMethod, + + /// When authenticationg with `private_key_jwt`, no private key was found + /// for the given algorithm. + #[error("no private key was found for the given algorithm")] + NoPrivateKeyFound, + + /// The signing algorithm is invalid for this authentication method. + #[error("invalid algorithm: {0}")] + InvalidSigningAlgorithm(#[from] InvalidAlgorithm), + + /// An error occurred when building the claims of the JWT. + #[error(transparent)] + JwtClaims(#[from] ClaimError), + + /// The key found cannot be used with the algorithm. + #[error("Wrong algorithm for key")] + JwtWrongAlgorithm, + + /// An error occurred when signing the JWT. 
+ #[error(transparent)] + JwtSignature(#[from] JwtSignatureError), +} + +#[derive(Debug, Deserialize)] +struct OAuth2ErrorResponse { + error: String, + error_description: Option, + error_uri: Option, +} + +impl std::fmt::Display for OAuth2ErrorResponse { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.error)?; + + if let Some(error_uri) = &self.error_uri { + write!(f, " (See {error_uri})")?; + } + + if let Some(error_description) = &self.error_description { + write!(f, ": {error_description}")?; + } + + Ok(()) + } +} + +/// An error returned by the OAuth 2.0 provider +#[derive(Debug, Error)] +pub struct OAuth2Error { + error: Option, + + #[source] + inner: reqwest::Error, +} + +impl std::fmt::Display for OAuth2Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(error) = &self.error { + write!( + f, + "Request to the provider failed with the following error: {error}" + ) + } else { + write!(f, "Request to the provider failed") + } + } +} + +impl From for OAuth2Error { + fn from(inner: reqwest::Error) -> Self { + Self { error: None, inner } + } +} + +/// An extension trait to deal with error responses from the OAuth 2.0 provider +#[async_trait] +pub(crate) trait ResponseExt { + async fn error_from_oauth2_error_response(self) -> Result + where + Self: Sized; +} + +#[async_trait] +impl ResponseExt for reqwest::Response { + async fn error_from_oauth2_error_response(self) -> Result { + let Err(inner) = self.error_for_status_ref() else { + return Ok(self); + }; + + let error: OAuth2ErrorResponse = self.json().await?; + + Err(OAuth2Error { + error: Some(error), + inner, + }) + } +} diff --git a/matrix-authentication-service/crates/oidc-client/src/lib.rs b/matrix-authentication-service/crates/oidc-client/src/lib.rs new file mode 100644 index 00000000..2b35896b --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/lib.rs @@ -0,0 +1,66 @@ +// Copyright 2024, 2025 New 
Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! An [OpenID Connect] client library for the [Matrix] specification. +//! +//! This is part of the [Matrix Authentication Service] project. +//! +//! # Scope +//! +//! The scope of this crate is to support OIDC features required by the +//! Matrix specification according to [MSC3861] and its sub-proposals. +//! +//! As such, it is compatible with the OpenID Connect 1.0 specification, but +//! also enforces Matrix-specific requirements or adds compatibility with new +//! [OAuth 2.0] features. +//! +//! # OpenID Connect and OAuth 2.0 Features +//! +//! - Grant Types: +//! - [Authorization Code](https://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth) +//! - [Client Credentials](https://www.rfc-editor.org/rfc/rfc6749#section-4.4) +//! - [Device Code](https://www.rfc-editor.org/rfc/rfc8628) (TBD) +//! - [Refresh Token](https://openid.net/specs/openid-connect-core-1_0.html#RefreshTokens) +//! - [User Info](https://openid.net/specs/openid-connect-core-1_0.html#UserInfo) +//! - [PKCE](https://www.rfc-editor.org/rfc/rfc7636) +//! +//! # Matrix features +//! +//! - Client registration +//! - Login +//! - Matrix API Scopes +//! - Logout +//! +//! [OpenID Connect]: https://openid.net/connect/ +//! [Matrix]: https://matrix.org/ +//! [Matrix Authentication Service]: https://github.com/element-hq/matrix-authentication-service +//! [MSC3861]: https://github.com/matrix-org/matrix-spec-proposals/pull/3861 +//! [OAuth 2.0]: https://oauth.net/2/ + +#![deny(missing_docs)] +#![allow(clippy::module_name_repetitions, clippy::implicit_hasher)] + +pub mod error; +pub mod requests; +pub mod types; + +use std::fmt; + +#[doc(inline)] +pub use mas_jose as jose; + +// Wrapper around `String` that cannot be used in a meaningful way outside of +// this crate. 
Used for string enums that only allow certain characters because +// their variant can't be private. +#[doc(hidden)] +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct PrivString(String); + +impl fmt::Debug for PrivString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/authorization_code.rs b/matrix-authentication-service/crates/oidc-client/src/requests/authorization_code.rs new file mode 100644 index 00000000..4965e13d --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/authorization_code.rs @@ -0,0 +1,462 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests for the [Authorization Code flow]. +//! +//! [Authorization Code flow]: https://openid.net/specs/openid-connect-core-1_0.html#CodeFlowAuth + +use std::{collections::HashSet, num::NonZeroU32}; + +use base64ct::{Base64UrlUnpadded, Encoding}; +use chrono::{DateTime, Utc}; +use language_tags::LanguageTag; +use mas_iana::oauth::{OAuthAuthorizationEndpointResponseType, PkceCodeChallengeMethod}; +use mas_jose::claims::{self, TokenHash}; +use oauth2_types::{ + pkce, + prelude::CodeChallengeMethodExt, + requests::{ + AccessTokenRequest, AccessTokenResponse, AuthorizationCodeGrant, AuthorizationRequest, + Display, Prompt, ResponseMode, + }, + scope::{OPENID, Scope}, +}; +use rand::{ + Rng, + distributions::{Alphanumeric, DistString}, +}; +use serde::Serialize; +use url::Url; + +use super::jose::JwtVerificationData; +use crate::{ + error::{AuthorizationError, IdTokenError, TokenAuthorizationCodeError}, + requests::{jose::verify_id_token, token::request_access_token}, + types::{IdToken, client_credentials::ClientCredentials}, +}; + +/// The data necessary to build an 
authorization request. +#[derive(Debug, Clone)] +pub struct AuthorizationRequestData { + /// The ID obtained when registering the client. + pub client_id: String, + + /// The scope to authorize. + /// + /// If the OpenID Connect scope token (`openid`) is not included, it will be + /// added. + pub scope: Scope, + + /// The URI to redirect the end-user to after the authorization. + /// + /// It must be one of the redirect URIs provided during registration. + pub redirect_uri: Url, + + /// The PKCE methods supported by the issuer. + /// + /// This field should be cloned from the provider metadata. If it is not + /// set, this security measure will not be used. + pub code_challenge_methods_supported: Option>, + + /// How the Authorization Server should display the authentication and + /// consent user interface pages to the End-User. + pub display: Option, + + /// Whether the Authorization Server should prompt the End-User for + /// reauthentication and consent. + /// + /// If [`Prompt::None`] is used, it must be the only value. + pub prompt: Option>, + + /// The allowable elapsed time in seconds since the last time the End-User + /// was actively authenticated by the OpenID Provider. + pub max_age: Option, + + /// End-User's preferred languages and scripts for the user interface. + pub ui_locales: Option>, + + /// ID Token previously issued by the Authorization Server being passed as a + /// hint about the End-User's current or past authenticated session with the + /// Client. + pub id_token_hint: Option, + + /// Hint to the Authorization Server about the login identifier the End-User + /// might use to log in. + pub login_hint: Option, + + /// Requested Authentication Context Class Reference values. + pub acr_values: Option>, + + /// Requested response mode. + pub response_mode: Option, +} + +impl AuthorizationRequestData { + /// Constructs a new `AuthorizationRequestData` with all the required + /// fields. 
+ #[must_use] + pub fn new(client_id: String, scope: Scope, redirect_uri: Url) -> Self { + Self { + client_id, + scope, + redirect_uri, + code_challenge_methods_supported: None, + display: None, + prompt: None, + max_age: None, + ui_locales: None, + id_token_hint: None, + login_hint: None, + acr_values: None, + response_mode: None, + } + } + + /// Set the `code_challenge_methods_supported` field of this + /// `AuthorizationRequestData`. + #[must_use] + pub fn with_code_challenge_methods_supported( + mut self, + code_challenge_methods_supported: Vec, + ) -> Self { + self.code_challenge_methods_supported = Some(code_challenge_methods_supported); + self + } + + /// Set the `display` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_display(mut self, display: Display) -> Self { + self.display = Some(display); + self + } + + /// Set the `prompt` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_prompt(mut self, prompt: Vec) -> Self { + self.prompt = Some(prompt); + self + } + + /// Set the `max_age` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_max_age(mut self, max_age: NonZeroU32) -> Self { + self.max_age = Some(max_age); + self + } + + /// Set the `ui_locales` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_ui_locales(mut self, ui_locales: Vec) -> Self { + self.ui_locales = Some(ui_locales); + self + } + + /// Set the `id_token_hint` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_id_token_hint(mut self, id_token_hint: String) -> Self { + self.id_token_hint = Some(id_token_hint); + self + } + + /// Set the `login_hint` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_login_hint(mut self, login_hint: String) -> Self { + self.login_hint = Some(login_hint); + self + } + + /// Set the `acr_values` field of this `AuthorizationRequestData`. 
+ #[must_use] + pub fn with_acr_values(mut self, acr_values: HashSet) -> Self { + self.acr_values = Some(acr_values); + self + } + + /// Set the `response_mode` field of this `AuthorizationRequestData`. + #[must_use] + pub fn with_response_mode(mut self, response_mode: ResponseMode) -> Self { + self.response_mode = Some(response_mode); + self + } +} + +/// The data necessary to validate a response from the Token endpoint in the +/// Authorization Code flow. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AuthorizationValidationData { + /// A unique identifier for the request. + pub state: String, + + /// A string to mitigate replay attacks. + /// Used when the `openid` scope is set (and therefore we are using OpenID + /// Connect). + pub nonce: Option, + + /// The URI where the end-user will be redirected after authorization. + pub redirect_uri: Url, + + /// A string to correlate the authorization request to the token request. + pub code_challenge_verifier: Option, +} + +#[derive(Clone, Serialize)] +struct FullAuthorizationRequest { + #[serde(flatten)] + inner: AuthorizationRequest, + + #[serde(flatten, skip_serializing_if = "Option::is_none")] + pkce: Option, +} + +/// Build the authorization request. +fn build_authorization_request( + authorization_data: AuthorizationRequestData, + rng: &mut impl Rng, +) -> Result<(FullAuthorizationRequest, AuthorizationValidationData), AuthorizationError> { + let AuthorizationRequestData { + client_id, + scope, + redirect_uri, + code_challenge_methods_supported, + display, + prompt, + max_age, + ui_locales, + id_token_hint, + login_hint, + acr_values, + response_mode, + } = authorization_data; + + let is_openid = scope.contains(&OPENID); + + // Generate a random CSRF "state" token and a nonce. + let state = Alphanumeric.sample_string(rng, 16); + + // Generate a random nonce if we're in 'OpenID Connect' mode + let nonce = is_openid.then(|| Alphanumeric.sample_string(rng, 16)); + + // Use PKCE, whenever possible. 
+ let (pkce, code_challenge_verifier) = if code_challenge_methods_supported + .iter() + .any(|methods| methods.contains(&PkceCodeChallengeMethod::S256)) + { + let mut verifier = [0u8; 32]; + rng.fill(&mut verifier); + + let method = PkceCodeChallengeMethod::S256; + let verifier = Base64UrlUnpadded::encode_string(&verifier); + let code_challenge = method.compute_challenge(&verifier)?.into(); + + let pkce = pkce::AuthorizationRequest { + code_challenge_method: method, + code_challenge, + }; + + (Some(pkce), Some(verifier)) + } else { + (None, None) + }; + + let auth_request = FullAuthorizationRequest { + inner: AuthorizationRequest { + response_type: OAuthAuthorizationEndpointResponseType::Code.into(), + client_id, + redirect_uri: Some(redirect_uri.clone()), + scope, + state: Some(state.clone()), + response_mode, + nonce: nonce.clone(), + display, + prompt, + max_age, + ui_locales, + id_token_hint, + login_hint, + acr_values, + request: None, + request_uri: None, + registration: None, + }, + pkce, + }; + + let auth_data = AuthorizationValidationData { + state, + nonce, + redirect_uri, + code_challenge_verifier, + }; + + Ok((auth_request, auth_data)) +} + +/// Build the URL for authenticating at the Authorization endpoint. +/// +/// # Arguments +/// +/// * `authorization_endpoint` - The URL of the issuer's authorization endpoint. +/// +/// * `authorization_data` - The data necessary to build the authorization +/// request. +/// +/// * `rng` - A random number generator. +/// +/// # Returns +/// +/// A URL to be opened in a web browser where the end-user will be able to +/// authorize the given scope, and the [`AuthorizationValidationData`] to +/// validate this request. +/// +/// The redirect URI will receive parameters in its query: +/// +/// * A successful response will receive a `code` and a `state`. +/// +/// * If the authorization fails, it should receive an `error` parameter with a +/// [`ClientErrorCode`] and optionally an `error_description`. 
+/// +/// # Errors +/// +/// Returns an error if preparing the URL fails. +/// +/// [`VerifiedClientMetadata`]: oauth2_types::registration::VerifiedClientMetadata +/// [`ClientErrorCode`]: oauth2_types::errors::ClientErrorCode +pub fn build_authorization_url( + authorization_endpoint: Url, + authorization_data: AuthorizationRequestData, + rng: &mut impl Rng, +) -> Result<(Url, AuthorizationValidationData), AuthorizationError> { + tracing::debug!( + scope = ?authorization_data.scope, + "Authorizing..." + ); + + let (authorization_request, validation_data) = + build_authorization_request(authorization_data, rng)?; + + let authorization_query = serde_urlencoded::to_string(authorization_request)?; + + let mut authorization_url = authorization_endpoint; + + // Add our parameters to the query, because the URL might already have one. + let mut full_query = authorization_url + .query() + .map(ToOwned::to_owned) + .unwrap_or_default(); + if !full_query.is_empty() { + full_query.push('&'); + } + full_query.push_str(&authorization_query); + + authorization_url.set_query(Some(&full_query)); + + Ok((authorization_url, validation_data)) +} + +/// Exchange an authorization code for an access token. +/// +/// This should be used as the first step for logging in, and to request a +/// token with a new scope. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `client_credentials` - The credentials obtained when registering the +/// client. +/// +/// * `token_endpoint` - The URL of the issuer's Token endpoint. +/// +/// * `code` - The authorization code returned at the Authorization endpoint. +/// +/// * `validation_data` - The validation data that was returned when building +/// the Authorization URL, for the state returned at the Authorization +/// endpoint. +/// +/// * `id_token_verification_data` - The data required to verify the ID Token in +/// the response. 
+/// +/// The signing algorithm corresponds to the `id_token_signed_response_alg` +/// field in the client metadata. +/// +/// If it is not provided, the ID Token won't be verified. Note that in the +/// OpenID Connect specification, this verification is required. +/// +/// * `now` - The current time. +/// +/// * `rng` - A random number generator. +/// +/// # Errors +/// +/// Returns an error if the request fails, the response is invalid or the +/// verification of the ID Token fails. +#[allow(clippy::too_many_arguments)] +#[tracing::instrument(skip_all, fields(token_endpoint))] +pub async fn access_token_with_authorization_code( + http_client: &reqwest::Client, + client_credentials: ClientCredentials, + token_endpoint: &Url, + code: String, + validation_data: AuthorizationValidationData, + id_token_verification_data: Option>, + now: DateTime, + rng: &mut impl Rng, +) -> Result<(AccessTokenResponse, Option>), TokenAuthorizationCodeError> { + tracing::debug!("Exchanging authorization code for access token..."); + + let token_response = request_access_token( + http_client, + client_credentials, + token_endpoint, + AccessTokenRequest::AuthorizationCode(AuthorizationCodeGrant { + code: code.clone(), + redirect_uri: Some(validation_data.redirect_uri), + code_verifier: validation_data.code_challenge_verifier, + }), + now, + rng, + ) + .await?; + + let id_token = if let Some(verification_data) = id_token_verification_data { + let signing_alg = verification_data.signing_algorithm; + + let id_token = token_response + .id_token + .as_deref() + .ok_or(IdTokenError::MissingIdToken)?; + + let id_token = verify_id_token(id_token, verification_data, None, now)?; + + let mut claims = id_token.payload().clone(); + + // Access token hash must match. + claims::AT_HASH + .extract_optional_with_options( + &mut claims, + TokenHash::new(signing_alg, &token_response.access_token), + ) + .map_err(IdTokenError::from)?; + + // Code hash must match. 
+ claims::C_HASH + .extract_optional_with_options(&mut claims, TokenHash::new(signing_alg, &code)) + .map_err(IdTokenError::from)?; + + // Nonce must match if we have one. + if let Some(nonce) = validation_data.nonce.as_deref() { + claims::NONCE + .extract_required_with_options(&mut claims, nonce) + .map_err(IdTokenError::from)?; + } + + Some(id_token.into_owned()) + } else { + None + }; + + Ok((token_response, id_token)) +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/client_credentials.rs b/matrix-authentication-service/crates/oidc-client/src/requests/client_credentials.rs new file mode 100644 index 00000000..539d1a71 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/client_credentials.rs @@ -0,0 +1,67 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests for the [Client Credentials flow]. +//! +//! [Client Credentials flow]: https://www.rfc-editor.org/rfc/rfc6749#section-4.4 + +use chrono::{DateTime, Utc}; +use oauth2_types::{ + requests::{AccessTokenRequest, AccessTokenResponse, ClientCredentialsGrant}, + scope::Scope, +}; +use rand::Rng; +use url::Url; + +use crate::{ + error::TokenRequestError, requests::token::request_access_token, + types::client_credentials::ClientCredentials, +}; + +/// Exchange an authorization code for an access token. +/// +/// This should be used as the first step for logging in, and to request a +/// token with a new scope. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `client_credentials` - The credentials obtained when registering the +/// client. +/// +/// * `token_endpoint` - The URL of the issuer's Token endpoint. +/// +/// * `scope` - The scope to authorize. +/// +/// * `now` - The current time. 
+/// +/// * `rng` - A random number generator. +/// +/// # Errors +/// +/// Returns an error if the request fails or the response is invalid. +#[tracing::instrument(skip_all, fields(token_endpoint))] +pub async fn access_token_with_client_credentials( + http_client: &reqwest::Client, + client_credentials: ClientCredentials, + token_endpoint: &Url, + scope: Option, + now: DateTime, + rng: &mut impl Rng, +) -> Result { + tracing::debug!("Requesting access token with client credentials..."); + + request_access_token( + http_client, + client_credentials, + token_endpoint, + AccessTokenRequest::ClientCredentials(ClientCredentialsGrant { scope }), + now, + rng, + ) + .await +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/discovery.rs b/matrix-authentication-service/crates/oidc-client/src/requests/discovery.rs new file mode 100644 index 00000000..e3807c41 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/discovery.rs @@ -0,0 +1,94 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests for OpenID Connect Provider [Discovery]. +//! +//! [Discovery]: https://openid.net/specs/openid-connect-discovery-1_0.html + +use mas_http::RequestBuilderExt; +use oauth2_types::oidc::{ProviderMetadata, VerifiedProviderMetadata}; +use url::Url; + +use crate::error::DiscoveryError; + +/// Fetch the provider metadata. +async fn discover_inner( + client: &reqwest::Client, + issuer: Url, +) -> Result { + tracing::debug!("Fetching provider metadata..."); + + let mut config_url = issuer; + + // If the path doesn't end with a slash, the last segment is removed when + // using `join`. 
+ if !config_url.path().ends_with('/') { + let mut path = config_url.path().to_owned(); + path.push('/'); + config_url.set_path(&path); + } + + let config_url = config_url.join(".well-known/openid-configuration")?; + + let response = client + .get(config_url.as_str()) + .send_traced() + .await? + .error_for_status()? + .json() + .await?; + + tracing::debug!(?response); + + Ok(response) +} + +/// Fetch the provider metadata and validate it. +/// +/// # Errors +/// +/// Returns an error if the request fails or if the data is invalid. +#[tracing::instrument(skip_all, fields(issuer))] +pub async fn discover( + client: &reqwest::Client, + issuer: &str, +) -> Result { + let provider_metadata = discover_inner(client, issuer.parse()?).await?; + + Ok(provider_metadata.validate(issuer)?) +} + +/// Fetch the [provider metadata] and make basic checks. +/// +/// Contrary to [`discover()`], this uses +/// [`ProviderMetadata::insecure_verify_metadata()`] to check the received +/// metadata instead of validating it according to the specification. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `issuer` - The URL of the OpenID Connect Provider to fetch metadata for. +/// +/// # Errors +/// +/// Returns an error if the request fails or if the data is invalid. +/// +/// # Warning +/// +/// It is not recommended to use this method in production as it doesn't +/// ensure that the issuer implements the proper security practices. +/// +/// [provider metadata]: https://openid.net/specs/openid-connect-discovery-1_0.html +#[tracing::instrument(skip_all, fields(issuer))] +pub async fn insecure_discover( + client: &reqwest::Client, + issuer: &str, +) -> Result { + let provider_metadata = discover_inner(client, issuer.parse()?).await?; + + Ok(provider_metadata.insecure_verify_metadata()?) 
+} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/jose.rs b/matrix-authentication-service/crates/oidc-client/src/requests/jose.rs new file mode 100644 index 00000000..12915caa --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/jose.rs @@ -0,0 +1,207 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests and method related to JSON Object Signing and Encryption. + +use std::collections::HashMap; + +use chrono::{DateTime, Utc}; +use mas_http::RequestBuilderExt; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_jose::{ + claims::{self, TimeOptions}, + jwk::PublicJsonWebKeySet, + jwt::Jwt, +}; +use serde_json::Value; +use url::Url; + +use crate::{ + error::{IdTokenError, JwksError, JwtVerificationError}, + types::IdToken, +}; + +/// Fetch a JWKS at the given URL. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `jwks_uri` - The URL where the JWKS can be retrieved. +/// +/// # Errors +/// +/// Returns an error if the request fails or if the data is invalid. +#[tracing::instrument(skip_all, fields(jwks_uri))] +pub async fn fetch_jwks( + client: &reqwest::Client, + jwks_uri: &Url, +) -> Result { + tracing::debug!("Fetching JWKS..."); + + let response: PublicJsonWebKeySet = client + .get(jwks_uri.as_str()) + .send_traced() + .await? + .error_for_status()? + .json() + .await?; + + Ok(response) +} + +/// The data required to verify a JWT. +#[derive(Clone, Copy)] +pub struct JwtVerificationData<'a> { + /// The URL of the issuer that generated the ID Token. + pub issuer: Option<&'a str>, + + /// The issuer's JWKS. + pub jwks: &'a PublicJsonWebKeySet, + + /// The ID obtained when registering the client. 
+ pub client_id: &'a String, + + /// The JWA that should have been used to sign the JWT, as set during + /// client registration. + pub signing_algorithm: &'a JsonWebSignatureAlg, +} + +/// Decode and verify a signed JWT. +/// +/// The following checks are performed: +/// +/// * The signature is verified with the given JWKS. +/// +/// * The `iss` claim must be present and match the issuer, if present +/// +/// * The `aud` claim must be present and match the client ID. +/// +/// * The `alg` in the header must match the signing algorithm. +/// +/// # Arguments +/// +/// * `jwt` - The serialized JWT to decode and verify. +/// +/// * `jwks` - The JWKS that should contain the public key to verify the JWT's +/// signature. +/// +/// * `issuer` - The issuer of the JWT. +/// +/// * `audience` - The audience that the JWT is intended for. +/// +/// * `signing_algorithm` - The JWA that should have been used to sign the JWT. +/// +/// # Errors +/// +/// Returns an error if the data is invalid or verification fails. +pub fn verify_signed_jwt<'a>( + jwt: &'a str, + verification_data: JwtVerificationData<'_>, +) -> Result>, JwtVerificationError> { + tracing::debug!("Validating JWT..."); + + let JwtVerificationData { + issuer, + jwks, + client_id, + signing_algorithm, + } = verification_data; + + let jwt: Jwt> = jwt.try_into()?; + + jwt.verify_with_jwks(jwks)?; + + let (header, mut claims) = jwt.clone().into_parts(); + + if let Some(issuer) = issuer { + // Must have the proper issuer. + claims::ISS.extract_required_with_options(&mut claims, issuer)?; + } + + // Must have the proper audience. + claims::AUD.extract_required_with_options(&mut claims, client_id)?; + + // Must use the proper algorithm. + if header.alg() != signing_algorithm { + return Err(JwtVerificationError::WrongSignatureAlg); + } + + Ok(jwt) +} + +/// Decode and verify an ID Token. 
+/// +/// Besides the checks of [`verify_signed_jwt()`], the following checks are +/// performed: +/// +/// * The `exp` claim must be present and the token must not have expired. +/// +/// * The `iat` claim must be present must be in the past. +/// +/// * The `sub` claim must be present. +/// +/// If an authorization ID token is provided, these extra checks are performed: +/// +/// * The `sub` claims must match. +/// +/// * The `auth_time` claims must match. +/// +/// # Arguments +/// +/// * `id_token` - The serialized ID Token to decode and verify. +/// +/// * `verification_data` - The data necessary to verify the ID Token. +/// +/// * `auth_id_token` - If the ID Token is not verified during an authorization +/// request, the ID token that was returned from the latest authorization +/// request. +/// +/// # Errors +/// +/// Returns an error if the data is invalid or verification fails. +pub fn verify_id_token<'a>( + id_token: &'a str, + verification_data: JwtVerificationData<'_>, + auth_id_token: Option<&IdToken<'_>>, + now: DateTime, +) -> Result, IdTokenError> { + let id_token = verify_signed_jwt(id_token, verification_data)?; + + let mut claims = id_token.payload().clone(); + + let time_options = TimeOptions::new(now); + // Must not have expired. + claims::EXP.extract_required_with_options(&mut claims, &time_options)?; + + // `iat` claim must be present. + claims::IAT.extract_required_with_options(&mut claims, time_options)?; + + // Subject identifier must be present. + let sub = claims::SUB.extract_required(&mut claims)?; + + // More checks if there is a previous ID token. + if let Some(auth_id_token) = auth_id_token { + let mut auth_claims = auth_id_token.payload().clone(); + + // Subject identifier must always be the same. + let auth_sub = claims::SUB.extract_required(&mut auth_claims)?; + if sub != auth_sub { + return Err(IdTokenError::WrongSubjectIdentifier); + } + + // If the authentication time is present, it must be unchanged. 
+ if let Some(auth_time) = claims::AUTH_TIME.extract_optional(&mut claims)? { + let prev_auth_time = claims::AUTH_TIME.extract_required(&mut auth_claims)?; + + if prev_auth_time != auth_time { + return Err(IdTokenError::WrongAuthTime); + } + } + } + + Ok(id_token) +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/mod.rs b/matrix-authentication-service/crates/oidc-client/src/requests/mod.rs new file mode 100644 index 00000000..24cc9e27 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/mod.rs @@ -0,0 +1,15 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Methods to interact with OpenID Connect and OAuth2.0 endpoints. + +pub mod authorization_code; +pub mod client_credentials; +pub mod discovery; +pub mod jose; +pub mod refresh_token; +pub mod token; +pub mod userinfo; diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/refresh_token.rs b/matrix-authentication-service/crates/oidc-client/src/requests/refresh_token.rs new file mode 100644 index 00000000..9af088b0 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/refresh_token.rs @@ -0,0 +1,119 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests for using [Refresh Tokens]. +//! +//! 
[Refresh Tokens]: https://openid.net/specs/openid-connect-core-1_0.html#RefreshTokens + +use chrono::{DateTime, Utc}; +use mas_jose::claims::{self, TokenHash}; +use oauth2_types::{ + requests::{AccessTokenRequest, AccessTokenResponse, RefreshTokenGrant}, + scope::Scope, +}; +use rand::Rng; +use url::Url; + +use super::jose::JwtVerificationData; +use crate::{ + error::{IdTokenError, TokenRefreshError}, + requests::{jose::verify_id_token, token::request_access_token}, + types::{IdToken, client_credentials::ClientCredentials}, +}; + +/// Exchange an authorization code for an access token. +/// +/// This should be used as the first step for logging in, and to request a +/// token with a new scope. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `client_credentials` - The credentials obtained when registering the +/// client. +/// +/// * `token_endpoint` - The URL of the issuer's Token endpoint. +/// +/// * `refresh_token` - The token used to refresh the access token returned at +/// the Token endpoint. +/// +/// * `scope` - The scope of the access token. The requested scope must not +/// include any scope not originally granted to the access token, and if +/// omitted is treated as equal to the scope originally granted by the issuer. +/// +/// * `id_token_verification_data` - The data required to verify the ID Token in +/// the response. +/// +/// The signing algorithm corresponds to the `id_token_signed_response_alg` +/// field in the client metadata. +/// +/// If it is not provided, the ID Token won't be verified. +/// +/// * `auth_id_token` - If an ID Token is expected in the response, the ID token +/// that was returned from the latest authorization request. +/// +/// * `now` - The current time. +/// +/// * `rng` - A random number generator. +/// +/// # Errors +/// +/// Returns an error if the request fails, the response is invalid or the +/// verification of the ID Token fails. 
+#[allow(clippy::too_many_arguments)] +#[tracing::instrument(skip_all, fields(token_endpoint))] +pub async fn refresh_access_token( + http_client: &reqwest::Client, + client_credentials: ClientCredentials, + token_endpoint: &Url, + refresh_token: String, + scope: Option, + id_token_verification_data: Option>, + auth_id_token: Option<&IdToken<'_>>, + now: DateTime, + rng: &mut impl Rng, +) -> Result<(AccessTokenResponse, Option>), TokenRefreshError> { + tracing::debug!("Refreshing access token…"); + + let token_response = request_access_token( + http_client, + client_credentials, + token_endpoint, + AccessTokenRequest::RefreshToken(RefreshTokenGrant { + refresh_token, + scope, + }), + now, + rng, + ) + .await?; + + let id_token = if let Some((verification_data, id_token)) = + id_token_verification_data.zip(token_response.id_token.as_ref()) + { + let auth_id_token = auth_id_token.ok_or(IdTokenError::MissingAuthIdToken)?; + let signing_alg = verification_data.signing_algorithm; + + let id_token = verify_id_token(id_token, verification_data, Some(auth_id_token), now)?; + + let mut claims = id_token.payload().clone(); + + // Access token hash must match. + claims::AT_HASH + .extract_optional_with_options( + &mut claims, + TokenHash::new(signing_alg, &token_response.access_token), + ) + .map_err(IdTokenError::from)?; + + Some(id_token.into_owned()) + } else { + None + }; + + Ok((token_response, id_token)) +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/token.rs b/matrix-authentication-service/crates/oidc-client/src/requests/token.rs new file mode 100644 index 00000000..774ab93d --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/token.rs @@ -0,0 +1,67 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Requests for the Token endpoint. + +use chrono::{DateTime, Utc}; +use http::header::ACCEPT; +use mas_http::RequestBuilderExt; +use mime::APPLICATION_JSON; +use oauth2_types::requests::{AccessTokenRequest, AccessTokenResponse}; +use rand::Rng; +use url::Url; + +use crate::{ + error::{ResponseExt, TokenRequestError}, + types::client_credentials::ClientCredentials, +}; + +/// Request an access token. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `client_credentials` - The credentials obtained when registering the +/// client. +/// +/// * `token_endpoint` - The URL of the issuer's Token endpoint. +/// +/// * `request` - The request to make at the Token endpoint. +/// +/// * `now` - The current time. +/// +/// * `rng` - A random number generator. +/// +/// # Errors +/// +/// Returns an error if the request fails or the response is invalid. +#[tracing::instrument(skip_all, fields(token_endpoint, request))] +pub async fn request_access_token( + http_client: &reqwest::Client, + client_credentials: ClientCredentials, + token_endpoint: &Url, + request: AccessTokenRequest, + now: DateTime, + rng: &mut impl Rng, +) -> Result { + tracing::debug!(?request, "Requesting access token..."); + + let token_request = http_client + .post(token_endpoint.as_str()) + .header(ACCEPT, APPLICATION_JSON.as_ref()); + + let token_response = client_credentials + .authenticated_form(token_request, &request, now, rng)? + .send_traced() + .await? + .error_from_oauth2_error_response() + .await? + .json() + .await?; + + Ok(token_response) +} diff --git a/matrix-authentication-service/crates/oidc-client/src/requests/userinfo.rs b/matrix-authentication-service/crates/oidc-client/src/requests/userinfo.rs new file mode 100644 index 00000000..e10526fc --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/requests/userinfo.rs @@ -0,0 +1,105 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Requests for obtaining [Claims] about an end-user. +//! +//! [Claims]: https://openid.net/specs/openid-connect-core-1_0.html#Claims + +use std::collections::HashMap; + +use headers::{ContentType, HeaderMapExt, HeaderValue}; +use http::header::ACCEPT; +use mas_http::RequestBuilderExt; +use mime::Mime; +use serde_json::Value; +use url::Url; + +use super::jose::JwtVerificationData; +use crate::{ + error::{IdTokenError, ResponseExt, UserInfoError}, + requests::jose::verify_signed_jwt, +}; + +/// Obtain information about an authenticated end-user. +/// +/// Returns a map of claims with their value, that should be extracted with +/// one of the [`Claim`] methods. +/// +/// # Arguments +/// +/// * `http_client` - The reqwest client to use for making HTTP requests. +/// +/// * `userinfo_endpoint` - The URL of the issuer's User Info endpoint. +/// +/// * `access_token` - The access token of the end-user. +/// +/// * `jwt_verification_data` - The data required to verify the response if a +/// signed response was requested during client registration. +/// +/// The signing algorithm corresponds to the `userinfo_signed_response_alg` +/// field in the client metadata. +/// +/// * `auth_id_token` - The ID token that was returned from the latest +/// authorization request. +/// +/// # Errors +/// +/// Returns an error if the request fails, the response is invalid or the +/// validation of the signed response fails. 
+/// +/// [`Claim`]: mas_jose::claims::Claim +#[tracing::instrument(skip_all, fields(userinfo_endpoint))] +pub async fn fetch_userinfo( + http_client: &reqwest::Client, + userinfo_endpoint: &Url, + access_token: &str, + jwt_verification_data: Option>, +) -> Result, UserInfoError> { + tracing::debug!("Obtaining user info…"); + + let expected_content_type = if jwt_verification_data.is_some() { + "application/jwt" + } else { + mime::APPLICATION_JSON.as_ref() + }; + + let userinfo_request = http_client + .get(userinfo_endpoint.as_str()) + .bearer_auth(access_token) + .header(ACCEPT, HeaderValue::from_static(expected_content_type)); + + let userinfo_response = userinfo_request + .send_traced() + .await? + .error_from_oauth2_error_response() + .await?; + + let content_type: Mime = userinfo_response + .headers() + .typed_try_get::() + .map_err(|_| UserInfoError::InvalidResponseContentTypeValue)? + .ok_or(UserInfoError::MissingResponseContentType)? + .into(); + + if content_type.essence_str() != expected_content_type { + return Err(UserInfoError::UnexpectedResponseContentType { + expected: expected_content_type.to_owned(), + got: content_type.to_string(), + }); + } + + let claims = if let Some(verification_data) = jwt_verification_data { + let response_body = userinfo_response.text().await?; + verify_signed_jwt(&response_body, verification_data) + .map_err(IdTokenError::from)? + .into_parts() + .1 + } else { + userinfo_response.json().await? + }; + + Ok(claims) +} diff --git a/matrix-authentication-service/crates/oidc-client/src/types/client_credentials.rs b/matrix-authentication-service/crates/oidc-client/src/types/client_credentials.rs new file mode 100644 index 00000000..a097ef46 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/types/client_credentials.rs @@ -0,0 +1,369 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Types and methods for client credentials. + +use std::{collections::HashMap, fmt}; + +use base64ct::{Base64UrlUnpadded, Encoding}; +use chrono::{DateTime, Duration, Utc}; +use mas_iana::{jose::JsonWebSignatureAlg, oauth::OAuthClientAuthenticationMethod}; +use mas_jose::{ + claims::{self, ClaimError}, + constraints::Constrainable, + jwa::{AsymmetricSigningKey, SymmetricKey}, + jwt::{JsonWebSignatureHeader, Jwt}, +}; +use mas_keystore::Keystore; +use rand::Rng; +use serde::Serialize; +use serde_json::Value; +use url::Url; + +use crate::error::CredentialsError; + +/// The supported authentication methods of this library. +/// +/// During client registration, make sure that you only use one of the values +/// defined here. +pub const CLIENT_SUPPORTED_AUTH_METHODS: &[OAuthClientAuthenticationMethod] = &[ + OAuthClientAuthenticationMethod::None, + OAuthClientAuthenticationMethod::ClientSecretBasic, + OAuthClientAuthenticationMethod::ClientSecretPost, + OAuthClientAuthenticationMethod::ClientSecretJwt, + OAuthClientAuthenticationMethod::PrivateKeyJwt, +]; + +/// The credentials obtained during registration, to authenticate a client on +/// endpoints that require it. +#[derive(Clone)] +pub enum ClientCredentials { + /// No client authentication is used. + /// + /// This is used if the client is public. + None { + /// The unique ID for the client. + client_id: String, + }, + + /// The client authentication is sent via the Authorization HTTP header. + ClientSecretBasic { + /// The unique ID for the client. + client_id: String, + + /// The secret of the client. + client_secret: String, + }, + + /// The client authentication is sent with the body of the request. + ClientSecretPost { + /// The unique ID for the client. + client_id: String, + + /// The secret of the client. 
+ client_secret: String, + }, + + /// The client authentication uses a JWT signed with a key derived from the + /// client secret. + ClientSecretJwt { + /// The unique ID for the client. + client_id: String, + + /// The secret of the client. + client_secret: String, + + /// The algorithm used to sign the JWT. + signing_algorithm: JsonWebSignatureAlg, + + /// The URL of the issuer's Token endpoint. + token_endpoint: Url, + }, + + /// The client authentication uses a JWT signed with a private key. + PrivateKeyJwt { + /// The unique ID for the client. + client_id: String, + + /// The keystore used to sign the JWT + keystore: Keystore, + + /// The algorithm used to sign the JWT. + signing_algorithm: JsonWebSignatureAlg, + + /// The URL of the issuer's Token endpoint. + token_endpoint: Url, + }, + + /// The client authenticates like Sign in with Apple wants + SignInWithApple { + /// The unique ID for the client. + client_id: String, + + /// The ECDSA key used to sign + key: elliptic_curve::SecretKey, + + /// The key ID + key_id: String, + + /// The Apple Team ID + team_id: String, + }, +} + +impl ClientCredentials { + /// Get the client ID of these `ClientCredentials`. + #[must_use] + pub fn client_id(&self) -> &str { + match self { + ClientCredentials::None { client_id } + | ClientCredentials::ClientSecretBasic { client_id, .. } + | ClientCredentials::ClientSecretPost { client_id, .. } + | ClientCredentials::ClientSecretJwt { client_id, .. } + | ClientCredentials::PrivateKeyJwt { client_id, .. } + | ClientCredentials::SignInWithApple { client_id, .. } => client_id, + } + } + + /// Apply these [`ClientCredentials`] to the given request with the given + /// form. 
+ pub(crate) fn authenticated_form( + &self, + request: reqwest::RequestBuilder, + form: &T, + now: DateTime, + rng: &mut impl Rng, + ) -> Result { + let request = match self { + ClientCredentials::None { client_id } => request.form(&RequestWithClientCredentials { + body: form, + client_id: Some(client_id), + client_secret: None, + client_assertion: None, + client_assertion_type: None, + }), + + ClientCredentials::ClientSecretBasic { + client_id, + client_secret, + } => { + let username = + form_urlencoded::byte_serialize(client_id.as_bytes()).collect::(); + let password = + form_urlencoded::byte_serialize(client_secret.as_bytes()).collect::(); + request + .basic_auth(username, Some(password)) + .form(&RequestWithClientCredentials { + body: form, + client_id: None, + client_secret: None, + client_assertion: None, + client_assertion_type: None, + }) + } + + ClientCredentials::ClientSecretPost { + client_id, + client_secret, + } => request.form(&RequestWithClientCredentials { + body: form, + client_id: Some(client_id), + client_secret: Some(client_secret), + client_assertion: None, + client_assertion_type: None, + }), + + ClientCredentials::ClientSecretJwt { + client_id, + client_secret, + signing_algorithm, + token_endpoint, + } => { + let claims = + prepare_claims(client_id.clone(), token_endpoint.to_string(), now, rng)?; + let key = SymmetricKey::new_for_alg( + client_secret.as_bytes().to_vec(), + signing_algorithm, + )?; + let header = JsonWebSignatureHeader::new(signing_algorithm.clone()); + + let jwt = Jwt::sign(header, claims, &key)?; + + request.form(&RequestWithClientCredentials { + body: form, + client_id: None, + client_secret: None, + client_assertion: Some(jwt.as_str()), + client_assertion_type: Some(JwtBearerClientAssertionType), + }) + } + + ClientCredentials::PrivateKeyJwt { + client_id, + keystore, + signing_algorithm, + token_endpoint, + } => { + let claims = + prepare_claims(client_id.clone(), token_endpoint.to_string(), now, rng)?; + + let key = 
keystore + .signing_key_for_algorithm(signing_algorithm) + .ok_or(CredentialsError::NoPrivateKeyFound)?; + let signer = key + .params() + .signing_key_for_alg(signing_algorithm) + .map_err(|_| CredentialsError::JwtWrongAlgorithm)?; + let mut header = JsonWebSignatureHeader::new(signing_algorithm.clone()); + + if let Some(kid) = key.kid() { + header = header.with_kid(kid); + } + + let client_assertion = Jwt::sign(header, claims, &signer)?; + + request.form(&RequestWithClientCredentials { + body: form, + client_id: None, + client_secret: None, + client_assertion: Some(client_assertion.as_str()), + client_assertion_type: Some(JwtBearerClientAssertionType), + }) + } + + ClientCredentials::SignInWithApple { + client_id, + key, + key_id, + team_id, + } => { + // SIWA expects a signed JWT as client secret + // https://developer.apple.com/documentation/accountorganizationaldatasharing/creating-a-client-secret + let signer = AsymmetricSigningKey::es256(key.clone()); + + let mut claims = HashMap::new(); + + claims::ISS.insert(&mut claims, team_id)?; + claims::SUB.insert(&mut claims, client_id)?; + claims::AUD.insert(&mut claims, "https://appleid.apple.com".to_owned())?; + claims::IAT.insert(&mut claims, now)?; + claims::EXP.insert(&mut claims, now + Duration::microseconds(60 * 1000 * 1000))?; + + let header = + JsonWebSignatureHeader::new(JsonWebSignatureAlg::Es256).with_kid(key_id); + + let client_secret = Jwt::sign(header, claims, &signer)?; + + request.form(&RequestWithClientCredentials { + body: form, + client_id: Some(client_id), + client_secret: Some(client_secret.as_str()), + client_assertion: None, + client_assertion_type: None, + }) + } + }; + + Ok(request) + } +} + +impl fmt::Debug for ClientCredentials { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::None { client_id } => f + .debug_struct("None") + .field("client_id", client_id) + .finish(), + Self::ClientSecretBasic { client_id, .. 
} => f + .debug_struct("ClientSecretBasic") + .field("client_id", client_id) + .finish_non_exhaustive(), + Self::ClientSecretPost { client_id, .. } => f + .debug_struct("ClientSecretPost") + .field("client_id", client_id) + .finish_non_exhaustive(), + Self::ClientSecretJwt { + client_id, + signing_algorithm, + token_endpoint, + .. + } => f + .debug_struct("ClientSecretJwt") + .field("client_id", client_id) + .field("signing_algorithm", signing_algorithm) + .field("token_endpoint", token_endpoint) + .finish_non_exhaustive(), + Self::PrivateKeyJwt { + client_id, + signing_algorithm, + token_endpoint, + .. + } => f + .debug_struct("PrivateKeyJwt") + .field("client_id", client_id) + .field("signing_algorithm", signing_algorithm) + .field("token_endpoint", token_endpoint) + .finish_non_exhaustive(), + Self::SignInWithApple { + client_id, + key_id, + team_id, + .. + } => f + .debug_struct("SignInWithApple") + .field("client_id", client_id) + .field("key_id", key_id) + .field("team_id", team_id) + .finish_non_exhaustive(), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] +#[serde(rename = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer")] +struct JwtBearerClientAssertionType; + +fn prepare_claims( + iss: String, + aud: String, + now: DateTime, + rng: &mut impl Rng, +) -> Result, ClaimError> { + let mut claims = HashMap::new(); + + claims::ISS.insert(&mut claims, iss.clone())?; + claims::SUB.insert(&mut claims, iss)?; + claims::AUD.insert(&mut claims, aud)?; + claims::IAT.insert(&mut claims, now)?; + claims::EXP.insert( + &mut claims, + now + Duration::microseconds(5 * 60 * 1000 * 1000), + )?; + + let mut jti = [0u8; 16]; + rng.fill(&mut jti); + let jti = Base64UrlUnpadded::encode_string(&jti); + claims::JTI.insert(&mut claims, jti)?; + + Ok(claims) +} + +/// A request with client credentials added to it. 
+#[derive(Clone, Serialize)] +struct RequestWithClientCredentials<'a, T> { + #[serde(flatten)] + body: T, + + #[serde(skip_serializing_if = "Option::is_none")] + client_id: Option<&'a str>, + #[serde(skip_serializing_if = "Option::is_none")] + client_secret: Option<&'a str>, + #[serde(skip_serializing_if = "Option::is_none")] + client_assertion: Option<&'a str>, + #[serde(skip_serializing_if = "Option::is_none")] + client_assertion_type: Option, +} diff --git a/matrix-authentication-service/crates/oidc-client/src/types/mod.rs b/matrix-authentication-service/crates/oidc-client/src/types/mod.rs new file mode 100644 index 00000000..79fe33a1 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/src/types/mod.rs @@ -0,0 +1,22 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! OAuth 2.0 and OpenID Connect types. + +pub mod client_credentials; + +use std::collections::HashMap; + +#[doc(inline)] +pub use mas_iana as iana; +use mas_jose::jwt::Jwt; +pub use oauth2_types::*; +use serde_json::Value; + +/// An OpenID Connect [ID Token]. +/// +/// [ID Token]: https://openid.net/specs/openid-connect-core-1_0.html#IDToken +pub type IdToken<'a> = Jwt<'a, HashMap>; diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/main.rs b/matrix-authentication-service/crates/oidc-client/tests/it/main.rs new file mode 100644 index 00000000..cc864108 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/main.rs @@ -0,0 +1,153 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::HashMap; + +use chrono::{DateTime, Duration, Utc}; +use mas_iana::{jose::JsonWebSignatureAlg, oauth::OAuthClientAuthenticationMethod}; +use mas_jose::{ + claims::{self, hash_token}, + constraints::Constrainable, + jwk::PublicJsonWebKeySet, + jwt::{JsonWebSignatureHeader, Jwt}, +}; +use mas_keystore::{JsonWebKey, JsonWebKeySet, Keystore, PrivateKey}; +use mas_oidc_client::types::{IdToken, client_credentials::ClientCredentials}; +use rand::{ + SeedableRng, + distributions::{Alphanumeric, DistString}, +}; +use url::Url; +use wiremock::MockServer; + +mod requests; +mod types; + +const REDIRECT_URI: &str = "http://localhost/"; +const CLIENT_ID: &str = "client!+ID"; +const CLIENT_SECRET: &str = "SECRET?%Gclient"; +const AUTHORIZATION_CODE: &str = "authC0D3"; +const CODE_VERIFIER: &str = "cODEv3R1f1ER"; +const NONCE: &str = "No0o0o0once"; +const ACCESS_TOKEN: &str = "AccessToken1"; +const REFRESH_TOKEN: &str = "RefreshToken1"; +const SUBJECT_IDENTIFIER: &str = "SubjectID"; +const ID_TOKEN_SIGNING_ALG: JsonWebSignatureAlg = JsonWebSignatureAlg::Rs256; + +fn now() -> DateTime { + #[allow(clippy::disallowed_methods)] + Utc::now() +} + +async fn init_test() -> (reqwest::Client, MockServer, Url) { + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let client = mas_http::reqwest_client(); + let mock_server = MockServer::start().await; + let issuer = Url::parse(&mock_server.uri()).expect("Couldn't parse URL"); + + (client, mock_server, issuer) +} + +/// Generate a keystore with a single key for the given algorithm. 
+fn keystore(alg: &JsonWebSignatureAlg) -> Keystore { + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let private_key = match alg { + JsonWebSignatureAlg::Rs256 + | JsonWebSignatureAlg::Rs384 + | JsonWebSignatureAlg::Rs512 + | JsonWebSignatureAlg::Ps256 + | JsonWebSignatureAlg::Ps384 + | JsonWebSignatureAlg::Ps512 => PrivateKey::generate_rsa(&mut rng).unwrap(), + JsonWebSignatureAlg::Es256 => PrivateKey::generate_ec_p256(&mut rng), + JsonWebSignatureAlg::Es384 => PrivateKey::generate_ec_p384(&mut rng), + _ => unimplemented!(), + }; + + let jwk = JsonWebKey::new(private_key).with_kid(Alphanumeric.sample_string(&mut rng, 10)); + + Keystore::new(JsonWebKeySet::new(vec![jwk])) +} + +/// Generate an ID token. +fn id_token(issuer: &str) -> (IdToken<'_>, PublicJsonWebKeySet) { + let signing_alg = ID_TOKEN_SIGNING_ALG; + + let keystore = keystore(&signing_alg); + let mut claims = HashMap::new(); + let now = now(); + + claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap(); + claims::SUB + .insert(&mut claims, SUBJECT_IDENTIFIER.to_owned()) + .unwrap(); + claims::AUD + .insert(&mut claims, CLIENT_ID.to_owned()) + .unwrap(); + claims::NONCE.insert(&mut claims, NONCE.to_owned()).unwrap(); + + claims::IAT.insert(&mut claims, now).unwrap(); + claims::EXP + .insert(&mut claims, now + Duration::try_hours(1).unwrap()) + .unwrap(); + + claims::AT_HASH + .insert(&mut claims, hash_token(&signing_alg, ACCESS_TOKEN).unwrap()) + .unwrap(); + claims::C_HASH + .insert( + &mut claims, + hash_token(&signing_alg, AUTHORIZATION_CODE).unwrap(), + ) + .unwrap(); + + let key = keystore.signing_key_for_algorithm(&signing_alg).unwrap(); + let signer = key.params().signing_key_for_alg(&signing_alg).unwrap(); + let header = JsonWebSignatureHeader::new(signing_alg).with_kid(key.kid().unwrap()); + let id_token = Jwt::sign(header, claims, &signer).unwrap(); + + (id_token, keystore.public_jwks()) +} + +/// Generate client credentials for the given authentication method. 
+fn client_credentials( + auth_method: &OAuthClientAuthenticationMethod, + issuer: &Url, +) -> ClientCredentials { + match auth_method { + OAuthClientAuthenticationMethod::None => ClientCredentials::None { + client_id: CLIENT_ID.to_owned(), + }, + OAuthClientAuthenticationMethod::ClientSecretPost => ClientCredentials::ClientSecretPost { + client_id: CLIENT_ID.to_owned(), + client_secret: CLIENT_SECRET.to_owned(), + }, + OAuthClientAuthenticationMethod::ClientSecretBasic => { + ClientCredentials::ClientSecretBasic { + client_id: CLIENT_ID.to_owned(), + client_secret: CLIENT_SECRET.to_owned(), + } + } + OAuthClientAuthenticationMethod::ClientSecretJwt => ClientCredentials::ClientSecretJwt { + client_id: CLIENT_ID.to_owned(), + client_secret: CLIENT_SECRET.to_owned(), + signing_algorithm: JsonWebSignatureAlg::Hs256, + token_endpoint: issuer.join("token").unwrap(), + }, + OAuthClientAuthenticationMethod::PrivateKeyJwt => { + let signing_algorithm = JsonWebSignatureAlg::Es256; + + ClientCredentials::PrivateKeyJwt { + client_id: CLIENT_ID.to_owned(), + keystore: keystore(&signing_algorithm), + signing_algorithm, + token_endpoint: issuer.join("token").unwrap(), + } + } + _ => unimplemented!(), + } +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/authorization_code.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/authorization_code.rs new file mode 100644 index 00000000..cc3f5b21 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/authorization_code.rs @@ -0,0 +1,354 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::HashMap, num::NonZeroU32}; + +use assert_matches::assert_matches; +use mas_iana::oauth::{ + OAuthAccessTokenType, OAuthClientAuthenticationMethod, PkceCodeChallengeMethod, +}; +use mas_jose::{claims::ClaimError, jwk::PublicJsonWebKeySet}; +use mas_oidc_client::{ + error::{IdTokenError, TokenAuthorizationCodeError}, + requests::{ + authorization_code::{ + AuthorizationRequestData, AuthorizationValidationData, + access_token_with_authorization_code, build_authorization_url, + }, + jose::JwtVerificationData, + }, +}; +use oauth2_types::{ + requests::{AccessTokenResponse, Display, Prompt}, + scope::OPENID, +}; +use rand::SeedableRng; +use url::Url; +use wiremock::{ + Mock, Request, ResponseTemplate, + matchers::{method, path}, +}; + +use crate::{ + ACCESS_TOKEN, AUTHORIZATION_CODE, CLIENT_ID, CODE_VERIFIER, ID_TOKEN_SIGNING_ALG, NONCE, + REDIRECT_URI, client_credentials, id_token, init_test, now, +}; + +#[test] +fn pass_authorization_url() { + let issuer = Url::parse("http://localhost/").unwrap(); + let authorization_endpoint = issuer.join("authorize").unwrap(); + let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let (url, validation_data) = build_authorization_url( + authorization_endpoint, + AuthorizationRequestData::new( + CLIENT_ID.to_owned(), + [OPENID].into_iter().collect(), + redirect_uri, + ) + .with_code_challenge_methods_supported(vec![PkceCodeChallengeMethod::S256]), + &mut rng, + ) + .unwrap(); + + assert_eq!(validation_data.state, "OrJ8xbWovSpJUTKz"); + assert_eq!( + validation_data.code_challenge_verifier.unwrap(), + "TSgZ_hr3TJPjhq4aDp34K_8ksjLwaa1xDcPiRGBcjhM" + ); + + assert_eq!(url.path(), "/authorize"); + + let query_pairs = url.query_pairs().collect::>(); + assert_eq!(query_pairs.get("scope").unwrap(), "openid"); + assert_eq!(query_pairs.get("response_type").unwrap(), "code"); + assert_eq!(query_pairs.get("client_id").unwrap(), CLIENT_ID); + 
assert_eq!(query_pairs.get("redirect_uri").unwrap(), REDIRECT_URI); + assert_eq!(query_pairs.get("display"), None); + assert_eq!(query_pairs.get("prompt"), None); + assert_eq!(query_pairs.get("max_age"), None); + assert_eq!(query_pairs.get("ui_locales"), None); + assert_eq!(query_pairs.get("id_token_hint"), None); + assert_eq!(query_pairs.get("login_hint"), None); + assert_eq!(query_pairs.get("acr_values"), None); + assert_eq!(*query_pairs.get("state").unwrap(), validation_data.state); + assert_eq!(query_pairs.get("nonce").unwrap(), "ox0PigY5l9xl5uTL"); + let code_challenge = query_pairs.get("code_challenge").unwrap(); + assert!(code_challenge.len() >= 43); + assert_eq!(query_pairs.get("code_challenge_method").unwrap(), "S256"); +} + +#[test] +fn pass_full_authorization_url() { + let issuer = Url::parse("http://localhost/").unwrap(); + let authorization_endpoint = issuer.join("authorize").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let authorization_data = AuthorizationRequestData::new( + CLIENT_ID.to_owned(), + [OPENID].into_iter().collect(), + Url::parse(REDIRECT_URI).unwrap(), + ) + .with_display(Display::Touch) + .with_prompt(vec![Prompt::Create]) + .with_max_age(NonZeroU32::new(86400).unwrap()) + .with_ui_locales(vec!["de".parse().unwrap()]) + .with_id_token_hint("fake.id.token".to_owned()) + .with_login_hint("mxid:@user:localhost".to_owned()) + .with_acr_values(["custom".to_owned()].into()); + + let (url, validation_data) = + build_authorization_url(authorization_endpoint, authorization_data, &mut rng).unwrap(); + + assert_eq!(validation_data.state, "OrJ8xbWovSpJUTKz"); + assert_eq!(validation_data.code_challenge_verifier, None); + + assert_eq!(url.path(), "/authorize"); + + let query_pairs = url.query_pairs().collect::>(); + assert_eq!(query_pairs.get("scope").unwrap(), "openid"); + assert_eq!(query_pairs.get("response_type").unwrap(), "code"); + assert_eq!(query_pairs.get("client_id").unwrap(), CLIENT_ID); + 
assert_eq!(query_pairs.get("redirect_uri").unwrap(), REDIRECT_URI); + assert_eq!(query_pairs.get("display").unwrap(), "touch"); + assert_eq!(query_pairs.get("prompt").unwrap(), "create"); + assert_eq!(query_pairs.get("max_age").unwrap(), "86400"); + assert_eq!(query_pairs.get("ui_locales").unwrap(), "de"); + assert_eq!(query_pairs.get("id_token_hint").unwrap(), "fake.id.token"); + assert_eq!( + query_pairs.get("login_hint").unwrap(), + "mxid:@user:localhost" + ); + assert_eq!(query_pairs.get("acr_values").unwrap(), "custom"); + assert_eq!(*query_pairs.get("state").unwrap(), validation_data.state); + assert_eq!(query_pairs.get("nonce").unwrap(), "ox0PigY5l9xl5uTL"); + assert_eq!(query_pairs.get("code_challenge"), None); + assert_eq!(query_pairs.get("code_challenge_method"), None); +} + +/// Check if the given request to the token endpoint is valid. +fn is_valid_token_endpoint_request(req: &Request) -> bool { + let body = form_urlencoded::parse(&req.body).collect::>(); + + if body.get("client_id").filter(|s| *s == CLIENT_ID).is_none() { + println!("Missing or wrong client ID"); + return false; + } + if body + .get("grant_type") + .filter(|s| *s == "authorization_code") + .is_none() + { + println!("Missing or wrong grant type"); + return false; + } + if body + .get("code") + .filter(|s| *s == AUTHORIZATION_CODE) + .is_none() + { + println!("Missing or wrong authorization code"); + return false; + } + if body + .get("redirect_uri") + .filter(|s| *s == REDIRECT_URI) + .is_none() + { + println!("Missing or wrong redirect URI"); + return false; + } + + if body + .get("code_verifier") + .filter(|s| *s == CODE_VERIFIER) + .is_none() + { + println!("Missing or wrong code verifier"); + return false; + } + + true +} + +#[tokio::test] +async fn pass_access_token_with_authorization_code() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = client_credentials(&OAuthClientAuthenticationMethod::None, &issuer); + let token_endpoint = 
issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); + let validation_data = AuthorizationValidationData { + state: "some_state".to_owned(), + nonce: Some(NONCE.to_owned()), + redirect_uri, + code_challenge_verifier: Some(CODE_VERIFIER.to_owned()), + }; + + let (id_token, jwks) = id_token(issuer.as_str()); + let id_token_verification_data = JwtVerificationData { + issuer: Some(issuer.as_str()), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + Mock::given(method("POST")) + .and(path("/token")) + .and(is_valid_token_endpoint_request) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: Some(id_token.to_string()), + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: Some([OPENID].into_iter().collect()), + }), + ) + .mount(&mock_server) + .await; + + let (response, response_id_token) = access_token_with_authorization_code( + &http_client, + client_credentials, + &token_endpoint, + AUTHORIZATION_CODE.to_owned(), + validation_data, + Some(id_token_verification_data), + now(), + &mut rng, + ) + .await + .unwrap(); + + assert_eq!(response.access_token, ACCESS_TOKEN); + assert_eq!(response.refresh_token, None); + assert!(response.scope.unwrap().contains("openid")); + assert_eq!(response_id_token.unwrap().as_str(), id_token.as_str()); +} + +#[tokio::test] +async fn fail_access_token_with_authorization_code_wrong_nonce() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = client_credentials(&OAuthClientAuthenticationMethod::None, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); + let validation_data = AuthorizationValidationData { + 
state: "some_state".to_owned(), + nonce: Some("wrong_nonce".to_owned()), + redirect_uri, + code_challenge_verifier: Some(CODE_VERIFIER.to_owned()), + }; + + let (id_token, jwks) = id_token(issuer.as_str()); + let id_token_verification_data = JwtVerificationData { + issuer: Some(issuer.as_str()), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + Mock::given(method("POST")) + .and(path("/token")) + .and(is_valid_token_endpoint_request) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: Some(id_token.into_string()), + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: Some([OPENID].into_iter().collect()), + }), + ) + .mount(&mock_server) + .await; + + let error = access_token_with_authorization_code( + &http_client, + client_credentials, + &token_endpoint, + AUTHORIZATION_CODE.to_owned(), + validation_data, + Some(id_token_verification_data), + now(), + &mut rng, + ) + .await + .unwrap_err(); + + assert_matches!( + error, + TokenAuthorizationCodeError::IdToken(IdTokenError::Claim(ClaimError::ValidationError { + claim: "nonce", + .. 
+ })) + ); +} + +#[tokio::test] +async fn fail_access_token_with_authorization_code_no_id_token() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = client_credentials(&OAuthClientAuthenticationMethod::None, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let redirect_uri = Url::parse(REDIRECT_URI).unwrap(); + let nonce = "some_nonce".to_owned(); + let validation_data = AuthorizationValidationData { + state: "some_state".to_owned(), + nonce: Some(nonce.clone()), + redirect_uri, + code_challenge_verifier: Some(CODE_VERIFIER.to_owned()), + }; + + let id_token_verification_data = JwtVerificationData { + issuer: Some(issuer.as_str()), + jwks: &PublicJsonWebKeySet::default(), + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + Mock::given(method("POST")) + .and(path("/token")) + .and(is_valid_token_endpoint_request) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: Some([OPENID].into_iter().collect()), + }), + ) + .mount(&mock_server) + .await; + + let error = access_token_with_authorization_code( + &http_client, + client_credentials, + &token_endpoint, + AUTHORIZATION_CODE.to_owned(), + validation_data, + Some(id_token_verification_data), + now(), + &mut rng, + ) + .await + .unwrap_err(); + + assert_matches!( + error, + TokenAuthorizationCodeError::IdToken(IdTokenError::MissingIdToken) + ); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/client_credentials.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/client_credentials.rs new file mode 100644 index 00000000..00b3c774 --- /dev/null +++ 
b/matrix-authentication-service/crates/oidc-client/tests/it/requests/client_credentials.rs @@ -0,0 +1,99 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use mas_iana::oauth::{OAuthAccessTokenType, OAuthClientAuthenticationMethod}; +use mas_oidc_client::requests::client_credentials::access_token_with_client_credentials; +use oauth2_types::{ + requests::AccessTokenResponse, + scope::{PROFILE, Scope}, +}; +use rand::SeedableRng; +use wiremock::{ + Mock, Request, ResponseTemplate, + matchers::{method, path}, +}; + +use crate::{ACCESS_TOKEN, CLIENT_ID, CLIENT_SECRET, client_credentials, init_test, now}; + +#[tokio::test] +async fn pass_access_token_with_client_credentials() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = + client_credentials(&OAuthClientAuthenticationMethod::ClientSecretPost, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let scope = [PROFILE].into_iter().collect::(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + Mock::given(method("POST")) + .and(path("/token")) + .and(|req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs + .get("grant_type") + .filter(|s| *s == "client_credentials") + .is_none() + { + println!("Wrong or missing grant type"); + return false; + } + if query_pairs + .get("scope") + .filter(|s| *s == "profile") + .is_none() + { + println!("Wrong or missing scope"); + return false; + } + if query_pairs + .get("client_id") + .filter(|s| *s == CLIENT_ID) + .is_none() + { + println!("Wrong or missing client ID"); + return false; + } + if query_pairs + .get("client_secret") + .filter(|s| *s == CLIENT_SECRET) + .is_none() + { + println!("Wrong or missing client secret"); + return false; + } + + 
true + }) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: Some(scope.clone()), + }), + ) + .mount(&mock_server) + .await; + + let response = access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + Some(scope), + now(), + &mut rng, + ) + .await + .unwrap(); + + assert_eq!(response.access_token, ACCESS_TOKEN); + assert_eq!(response.refresh_token, None); + assert!(response.scope.unwrap().contains("profile")); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/discovery.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/discovery.rs new file mode 100644 index 00000000..cacdf7d2 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/discovery.rs @@ -0,0 +1,91 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use assert_matches::assert_matches; +use mas_iana::oauth::{OAuthAuthorizationEndpointResponseType, PkceCodeChallengeMethod}; +use mas_jose::jwa::SUPPORTED_SIGNING_ALGORITHMS; +use mas_oidc_client::{ + error::DiscoveryError, + requests::discovery::{discover, insecure_discover}, +}; +use oauth2_types::oidc::{ProviderMetadata, SubjectType}; +use url::Url; +use wiremock::{ + Mock, ResponseTemplate, + matchers::{method, path}, +}; + +use crate::init_test; + +fn provider_metadata(issuer: &Url) -> ProviderMetadata { + ProviderMetadata { + issuer: Some(issuer.as_str().to_owned()), + authorization_endpoint: issuer.join("authorize").ok(), + token_endpoint: issuer.join("token").ok(), + jwks_uri: issuer.join("jwks").ok(), + response_types_supported: Some(vec![OAuthAuthorizationEndpointResponseType::Code.into()]), + subject_types_supported: Some(vec![SubjectType::Pairwise, SubjectType::Public]), + id_token_signing_alg_values_supported: Some(SUPPORTED_SIGNING_ALGORITHMS.into()), + code_challenge_methods_supported: Some(vec![PkceCodeChallengeMethod::S256]), + ..Default::default() + } +} + +#[tokio::test] +async fn pass_discover() { + let (http_client, mock_server, issuer) = init_test().await; + + Mock::given(method("GET")) + .and(path("/.well-known/openid-configuration")) + .respond_with(ResponseTemplate::new(200).set_body_json(provider_metadata(&issuer))) + .mount(&mock_server) + .await; + + let provider_metadata = insecure_discover(&http_client, issuer.as_str()) + .await + .unwrap(); + + assert_eq!(provider_metadata.issuer(), issuer.as_str()); +} + +#[tokio::test] +async fn fail_discover_404() { + let (http_client, _mock_server, issuer) = init_test().await; + + let error = discover(&http_client, issuer.as_str()).await.unwrap_err(); + + assert_matches!(error, DiscoveryError::Http(_)); +} + +#[tokio::test] +async fn fail_discover_not_json() { + let (http_client, mock_server, issuer) = init_test().await; + + Mock::given(method("GET")) + 
.and(path("/.well-known/openid-configuration")) + .respond_with(ResponseTemplate::new(200)) + .mount(&mock_server) + .await; + + let error = discover(&http_client, issuer.as_str()).await.unwrap_err(); + + assert_matches!(error, DiscoveryError::Http(_)); +} + +#[tokio::test] +async fn fail_discover_invalid_metadata() { + let (http_client, mock_server, issuer) = init_test().await; + + Mock::given(method("GET")) + .and(path("/.well-known/openid-configuration")) + .respond_with(ResponseTemplate::new(200).set_body_json(ProviderMetadata::default())) + .mount(&mock_server) + .await; + + let error = discover(&http_client, issuer.as_str()).await.unwrap_err(); + + assert_matches!(error, DiscoveryError::Validation(_)); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/jose.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/jose.rs new file mode 100644 index 00000000..6adedc57 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/jose.rs @@ -0,0 +1,242 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use assert_matches::assert_matches; +use chrono::{DateTime, Duration, Utc}; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_jose::{ + claims::{self, ClaimError}, + constraints::Constrainable, + jwk::PublicJsonWebKeySet, + jwt::{JsonWebSignatureHeader, Jwt}, +}; +use mas_oidc_client::{ + error::{IdTokenError, JwtVerificationError}, + requests::jose::{JwtVerificationData, verify_id_token}, + types::IdToken, +}; + +use crate::{CLIENT_ID, ID_TOKEN_SIGNING_ALG, SUBJECT_IDENTIFIER, keystore, now}; + +#[derive(Clone, Copy, PartialEq, Eq)] +enum IdTokenFlag { + WrongExpiration, + WrongSubject, +} + +/// Generate an ID token with the given settings. 
+fn id_token( + issuer: &str, + flag: Option, + auth_time: Option>, +) -> (IdToken<'_>, PublicJsonWebKeySet) { + let signing_alg = ID_TOKEN_SIGNING_ALG; + + let keystore = keystore(&signing_alg); + let mut claims = HashMap::new(); + let now = now(); + + claims::ISS.insert(&mut claims, issuer.to_owned()).unwrap(); + claims::AUD + .insert(&mut claims, CLIENT_ID.to_owned()) + .unwrap(); + + if flag == Some(IdTokenFlag::WrongSubject) { + claims::SUB + .insert(&mut claims, "wrong_subject".to_owned()) + .unwrap(); + } else { + claims::SUB + .insert(&mut claims, SUBJECT_IDENTIFIER.to_owned()) + .unwrap(); + } + + claims::IAT.insert(&mut claims, now).unwrap(); + + if flag == Some(IdTokenFlag::WrongExpiration) { + claims::EXP + .insert(&mut claims, now - Duration::try_hours(1).unwrap()) + .unwrap(); + } else { + claims::EXP + .insert(&mut claims, now + Duration::try_hours(1).unwrap()) + .unwrap(); + } + + if let Some(auth_time) = auth_time { + claims::AUTH_TIME.insert(&mut claims, auth_time).unwrap(); + } + + let key = keystore.signing_key_for_algorithm(&signing_alg).unwrap(); + let signer = key.params().signing_key_for_alg(&signing_alg).unwrap(); + let header = JsonWebSignatureHeader::new(signing_alg).with_kid(key.kid().unwrap()); + let id_token = Jwt::sign(header, claims, &signer).unwrap(); + + (id_token, keystore.public_jwks()) +} + +#[tokio::test] +async fn pass_verify_id_token() { + let issuer = "http://localhost/"; + let now = now(); + let (auth_id_token, _) = id_token(issuer, None, Some(now)); + let (id_token, jwks) = id_token(issuer, None, Some(now)); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + verify_id_token( + id_token.as_str(), + verification_data, + Some(&auth_id_token), + now, + ) + .unwrap(); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_issuer() { + let issuer = "http://localhost/"; + let wrong_issuer = 
"http://distanthost/"; + let (id_token, jwks) = id_token(issuer, None, None); + let now = now(); + + let verification_data = JwtVerificationData { + issuer: Some(wrong_issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + let error = verify_id_token(id_token.as_str(), verification_data, None, now).unwrap_err(); + + assert_matches!( + error, + IdTokenError::Jwt(JwtVerificationError::Claim(ClaimError::ValidationError { + claim: "iss", + .. + })) + ); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_audience() { + let issuer = "http://localhost/"; + let (id_token, jwks) = id_token(issuer, None, None); + let now = now(); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &"wrong_client_id".to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + let error = verify_id_token(id_token.as_str(), verification_data, None, now).unwrap_err(); + + assert_matches!( + error, + IdTokenError::Jwt(JwtVerificationError::Claim(ClaimError::ValidationError { + claim: "aud", + .. 
+ })) + ); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_signing_algorithm() { + let issuer = "http://localhost/"; + let (id_token, jwks) = id_token(issuer, None, None); + let now = now(); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &JsonWebSignatureAlg::Unknown("wrong_algorithm".to_owned()), + }; + + let error = verify_id_token(id_token.as_str(), verification_data, None, now).unwrap_err(); + + assert_matches!( + error, + IdTokenError::Jwt(JwtVerificationError::WrongSignatureAlg) + ); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_expiration() { + let issuer = "http://localhost/"; + let (id_token, jwks) = id_token(issuer, Some(IdTokenFlag::WrongExpiration), None); + let now = now(); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + let error = verify_id_token(id_token.as_str(), verification_data, None, now).unwrap_err(); + + assert_matches!(error, IdTokenError::Claim(_)); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_subject() { + let issuer = "http://localhost/"; + let now = now(); + let (auth_id_token, _) = id_token(issuer, None, Some(now)); + let (id_token, jwks) = id_token(issuer, Some(IdTokenFlag::WrongSubject), None); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + let error = verify_id_token( + id_token.as_str(), + verification_data, + Some(&auth_id_token), + now, + ) + .unwrap_err(); + + assert_matches!(error, IdTokenError::WrongSubjectIdentifier); +} + +#[tokio::test] +async fn fail_verify_id_token_wrong_auth_time() { + let issuer = "http://localhost/"; + let now = now(); + let (auth_id_token, _) = id_token(issuer, None, Some(now)); + let (id_token, jwks) = 
id_token(issuer, None, Some(now + Duration::try_hours(1).unwrap())); + + let verification_data = JwtVerificationData { + issuer: Some(issuer), + jwks: &jwks, + client_id: &CLIENT_ID.to_owned(), + signing_algorithm: &ID_TOKEN_SIGNING_ALG, + }; + + let error = verify_id_token( + id_token.as_str(), + verification_data, + Some(&auth_id_token), + now, + ) + .unwrap_err(); + + assert_matches!(error, IdTokenError::WrongAuthTime); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/mod.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/mod.rs new file mode 100644 index 00000000..fce40a2e --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/mod.rs @@ -0,0 +1,12 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod authorization_code; +mod client_credentials; +mod discovery; +mod jose; +mod refresh_token; +mod userinfo; diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/refresh_token.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/refresh_token.rs new file mode 100644 index 00000000..9b6e7c39 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/refresh_token.rs @@ -0,0 +1,90 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::HashMap; + +use assert_matches::assert_matches; +use mas_iana::oauth::{OAuthAccessTokenType, OAuthClientAuthenticationMethod}; +use mas_oidc_client::requests::refresh_token::refresh_access_token; +use oauth2_types::requests::AccessTokenResponse; +use rand::SeedableRng; +use wiremock::{ + Mock, Request, ResponseTemplate, + matchers::{method, path}, +}; + +use crate::{ACCESS_TOKEN, CLIENT_ID, REFRESH_TOKEN, client_credentials, init_test, now}; + +#[tokio::test] +async fn pass_refresh_access_token() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = client_credentials(&OAuthClientAuthenticationMethod::None, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + Mock::given(method("POST")) + .and(path("/token")) + .and(|req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs + .get("grant_type") + .filter(|s| *s == "refresh_token") + .is_none() + { + println!("Wrong or missing grant type"); + return false; + } + if query_pairs + .get("refresh_token") + .filter(|s| *s == REFRESH_TOKEN) + .is_none() + { + println!("Wrong or missing refresh token"); + return false; + } + if query_pairs + .get("client_id") + .filter(|s| *s == CLIENT_ID) + .is_none() + { + println!("Wrong or missing client ID"); + return false; + } + + true + }) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + let (response, response_id_token) = refresh_access_token( + &http_client, + client_credentials, + &token_endpoint, + REFRESH_TOKEN.to_owned(), + None, + None, + None, + now(), + &mut rng, + ) + .await + .unwrap(); + + assert_eq!(response.access_token, ACCESS_TOKEN); + 
assert_eq!(response.refresh_token, None); + assert_matches!(response_id_token, None); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/requests/userinfo.rs b/matrix-authentication-service/crates/oidc-client/tests/it/requests/userinfo.rs new file mode 100644 index 00000000..f7d979ab --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/requests/userinfo.rs @@ -0,0 +1,39 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_oidc_client::requests::userinfo::fetch_userinfo; +use serde_json::json; +use wiremock::{ + Mock, ResponseTemplate, + matchers::{header, method, path}, +}; + +use crate::{ACCESS_TOKEN, SUBJECT_IDENTIFIER, init_test}; + +#[tokio::test] +async fn pass_fetch_userinfo() { + let (http_client, mock_server, issuer) = init_test().await; + let userinfo_endpoint = issuer.join("userinfo").unwrap(); + + Mock::given(method("GET")) + .and(path("/userinfo")) + .and(header( + "authorization", + format!("Bearer {ACCESS_TOKEN}").as_str(), + )) + .respond_with(ResponseTemplate::new(200).set_body_json(json!({ + "sub": SUBJECT_IDENTIFIER, + "email": "janedoe@example.com", + }))) + .mount(&mock_server) + .await; + + let claims = fetch_userinfo(&http_client, &userinfo_endpoint, ACCESS_TOKEN, None) + .await + .unwrap(); + + assert_eq!(claims.get("email").unwrap(), "janedoe@example.com"); +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/types/client_credentials.rs b/matrix-authentication-service/crates/oidc-client/tests/it/types/client_credentials.rs new file mode 100644 index 00000000..c53a98e0 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/types/client_credentials.rs @@ -0,0 +1,343 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use base64ct::Encoding; +use http::header::AUTHORIZATION; +use mas_iana::oauth::{OAuthAccessTokenType, OAuthClientAuthenticationMethod}; +use mas_jose::{ + claims::{self, TimeOptions}, + jwt::Jwt, +}; +use mas_oidc_client::{ + requests::client_credentials::access_token_with_client_credentials, + types::client_credentials::ClientCredentials, +}; +use oauth2_types::requests::AccessTokenResponse; +use rand::SeedableRng; +use serde_json::Value; +use wiremock::{ + Mock, Request, ResponseTemplate, + matchers::{header, method, path}, +}; + +use crate::{ACCESS_TOKEN, CLIENT_ID, CLIENT_SECRET, client_credentials, init_test, now}; + +#[tokio::test] +async fn pass_none() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = client_credentials(&OAuthClientAuthenticationMethod::None, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + Mock::given(method("POST")) + .and(path("/token")) + .and(|req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs + .get("client_id") + .filter(|s| *s == CLIENT_ID) + .is_none() + { + println!("Wrong or missing client ID"); + return false; + } + + true + }) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + None, + now(), + &mut rng, + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn pass_client_secret_basic() { + let (http_client, mock_server, issuer) = 
init_test().await; + let client_credentials = + client_credentials(&OAuthClientAuthenticationMethod::ClientSecretBasic, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let username = form_urlencoded::byte_serialize(CLIENT_ID.as_bytes()).collect::(); + let password = form_urlencoded::byte_serialize(CLIENT_SECRET.as_bytes()).collect::(); + let enc_user_pass = + base64ct::Base64::encode_string(format!("{username}:{password}").as_bytes()); + let authorization_header = format!("Basic {enc_user_pass}"); + + Mock::given(method("POST")) + .and(path("/token")) + .and(header(AUTHORIZATION, authorization_header.as_str())) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + None, + now(), + &mut rng, + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn pass_client_secret_post() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = + client_credentials(&OAuthClientAuthenticationMethod::ClientSecretPost, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + Mock::given(method("POST")) + .and(path("/token")) + .and(|req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs + .get("client_id") + .filter(|s| *s == CLIENT_ID) + .is_none() + { + println!("Wrong or missing client ID"); + return false; + } + if query_pairs + .get("client_secret") + .filter(|s| *s == CLIENT_SECRET) + .is_none() + { + println!("Wrong or missing client secret"); + return false; + } + + true + }) + .respond_with( + 
ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + None, + now(), + &mut rng, + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn pass_client_secret_jwt() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = + client_credentials(&OAuthClientAuthenticationMethod::ClientSecretJwt, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let endpoint = token_endpoint.to_string(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + Mock::given(method("POST")) + .and(path("/token")) + .and(move |req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs.contains_key("client_id") { + println!("`client_secret_jwt` client authentication should not use `client_id`"); + return false; + } + if query_pairs + .get("client_assertion_type") + .filter(|s| *s == "urn:ietf:params:oauth:client-assertion-type:jwt-bearer") + .is_none() + { + println!("Wrong or missing client assertion type"); + return false; + } + + let Some(jwt) = query_pairs.get("client_assertion") else { + println!("Missing client assertion"); + return false; + }; + + let jwt = Jwt::>::try_from(jwt.as_ref()).unwrap(); + if jwt + .verify_with_shared_secret(CLIENT_SECRET.as_bytes().to_owned()) + .is_err() + { + println!("Client assertion signature verification failed"); + return false; + } + + let mut claims = jwt.into_parts().1; + if let Err(error) = verify_client_jwt(&mut claims, &endpoint) { + println!("Client assertion claims verification failed: {error}"); + return false; + } + + true + }) + .respond_with( + ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: 
ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + None, + now(), + &mut rng, + ) + .await + .unwrap(); +} + +#[tokio::test] +async fn pass_private_key_jwt() { + let (http_client, mock_server, issuer) = init_test().await; + let client_credentials = + client_credentials(&OAuthClientAuthenticationMethod::PrivateKeyJwt, &issuer); + let token_endpoint = issuer.join("token").unwrap(); + let endpoint = token_endpoint.to_string(); + let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(42); + + let ClientCredentials::PrivateKeyJwt { keystore, .. } = &client_credentials else { + panic!("should be PrivateKeyJwt") + }; + let client_jwks = keystore.public_jwks(); + + Mock::given(method("POST")) + .and(path("/token")) + .and(move |req: &Request| { + let query_pairs = form_urlencoded::parse(&req.body).collect::>(); + + if query_pairs.contains_key("client_id") { + println!("`private_key_jwt` client authentication should not use `client_id`"); + return false; + } + if query_pairs + .get("client_assertion_type") + .filter(|s| *s == "urn:ietf:params:oauth:client-assertion-type:jwt-bearer") + .is_none() + { + println!("Wrong or missing client assertion type"); + return false; + } + + let Some(jwt) = query_pairs.get("client_assertion") else { + println!("Missing client assertion"); + return false; + }; + + let jwt = Jwt::>::try_from(jwt.as_ref()).unwrap(); + if jwt.verify_with_jwks(&client_jwks).is_err() { + println!("Client assertion signature verification failed"); + return false; + } + + let mut claims = jwt.into_parts().1; + if let Err(error) = verify_client_jwt(&mut claims, &endpoint) { + println!("Client assertion claims verification failed: {error}"); + return false; + } + + true + }) + .respond_with( + 
ResponseTemplate::new(200).set_body_json(AccessTokenResponse { + access_token: ACCESS_TOKEN.to_owned(), + refresh_token: None, + id_token: None, + token_type: OAuthAccessTokenType::Bearer, + expires_in: None, + scope: None, + }), + ) + .mount(&mock_server) + .await; + + access_token_with_client_credentials( + &http_client, + client_credentials, + &token_endpoint, + None, + now(), + &mut rng, + ) + .await + .unwrap(); +} + +fn verify_client_jwt( + claims: &mut HashMap, + token_endpoint: &String, +) -> Result<(), Box> { + claims::ISS.extract_required_with_options(claims, CLIENT_ID)?; + + let sub = claims::SUB.extract_required(claims)?; + if sub != CLIENT_ID { + return Err("Wrong sub".into()); + } + + claims::AUD.extract_required_with_options(claims, token_endpoint)?; + + claims::EXP.extract_required_with_options(claims, TimeOptions::new(now()))?; + + Ok(()) +} diff --git a/matrix-authentication-service/crates/oidc-client/tests/it/types/mod.rs b/matrix-authentication-service/crates/oidc-client/tests/it/types/mod.rs new file mode 100644 index 00000000..a09d66a7 --- /dev/null +++ b/matrix-authentication-service/crates/oidc-client/tests/it/types/mod.rs @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 Kévin Commaille. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod client_credentials; diff --git a/matrix-authentication-service/crates/policy/Cargo.toml b/matrix-authentication-service/crates/policy/Cargo.toml new file mode 100644 index 00000000..7496f272 --- /dev/null +++ b/matrix-authentication-service/crates/policy/Cargo.toml @@ -0,0 +1,34 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-policy" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +arc-swap.workspace = true +opa-wasm.workspace = true +schemars.workspace = true +serde_json.workspace = true +serde.workspace = true +thiserror.workspace = true +tokio.workspace = true +tracing.workspace = true + +mas-data-model.workspace = true +oauth2-types.workspace = true + +[[bin]] +name = "schema" diff --git a/matrix-authentication-service/crates/policy/src/bin/schema.rs b/matrix-authentication-service/crates/policy/src/bin/schema.rs new file mode 100644 index 00000000..be778f6e --- /dev/null +++ b/matrix-authentication-service/crates/policy/src/bin/schema.rs @@ -0,0 +1,47 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +#![expect( + clippy::disallowed_types, + reason = "We use Path/PathBuf instead of camino here for simplicity" +)] + +use std::path::{Path, PathBuf}; + +use mas_policy::model::{ + AuthorizationGrantInput, ClientRegistrationInput, CompatLoginInput, EmailInput, RegisterInput, +}; +use schemars::{JsonSchema, generate::SchemaSettings}; + +fn write_schema(out_dir: Option<&Path>, file: &str) { + let mut writer: Box = if let Some(out_dir) = out_dir { + let path = out_dir.join(file); + eprintln!("Writing to {}", path.display()); + let file = std::fs::File::create(path).expect("Failed to create file"); + Box::new(std::io::BufWriter::new(file)) + } else { + eprintln!("--- {file} ---"); + Box::new(std::io::stdout()) + }; + + let generator = SchemaSettings::draft07().into_generator(); + let schema = generator.into_root_schema_for::(); + serde_json::to_writer_pretty(&mut writer, &schema).expect("Failed to serialize schema"); + writer.flush().expect("Failed to flush writer"); +} + +/// Write the input schemas to the output directory. +/// They are then used in rego files to type check the input. +fn main() { + let output_root = std::env::var("OUT_DIR").map(PathBuf::from).ok(); + let output_root = output_root.as_deref(); + + write_schema::(output_root, "register_input.json"); + write_schema::(output_root, "client_registration_input.json"); + write_schema::(output_root, "authorization_grant_input.json"); + write_schema::(output_root, "compat_login_input.json"); + write_schema::(output_root, "email_input.json"); +} diff --git a/matrix-authentication-service/crates/policy/src/lib.rs b/matrix-authentication-service/crates/policy/src/lib.rs new file mode 100644 index 00000000..dcb68dd3 --- /dev/null +++ b/matrix-authentication-service/crates/policy/src/lib.rs @@ -0,0 +1,725 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub mod model; + +use std::sync::Arc; + +use arc_swap::ArcSwap; +use mas_data_model::{SessionLimitConfig, Ulid}; +use opa_wasm::{ + Runtime, + wasmtime::{Config, Engine, Module, OptLevel, Store}, +}; +use serde::Serialize; +use thiserror::Error; +use tokio::io::{AsyncRead, AsyncReadExt}; + +pub use self::model::{ + AuthorizationGrantInput, ClientRegistrationInput, Code as ViolationCode, CompatLoginInput, + EmailInput, EvaluationResult, GrantType, RegisterInput, RegistrationMethod, Requester, + Violation, +}; + +#[derive(Debug, Error)] +pub enum LoadError { + #[error("failed to read module")] + Read(#[from] tokio::io::Error), + + #[error("failed to create WASM engine")] + Engine(#[source] anyhow::Error), + + #[error("module compilation task crashed")] + CompilationTask(#[from] tokio::task::JoinError), + + #[error("failed to compile WASM module")] + Compilation(#[source] anyhow::Error), + + #[error("invalid policy data")] + InvalidData(#[source] anyhow::Error), + + #[error("failed to instantiate a test instance")] + Instantiate(#[source] InstantiateError), +} + +impl LoadError { + /// Creates an example of an invalid data error, used for API response + /// documentation + #[doc(hidden)] + #[must_use] + pub fn invalid_data_example() -> Self { + Self::InvalidData(anyhow::Error::msg("Failed to merge policy data objects")) + } +} + +#[derive(Debug, Error)] +pub enum InstantiateError { + #[error("failed to create WASM runtime")] + Runtime(#[source] anyhow::Error), + + #[error("missing entrypoint {entrypoint}")] + MissingEntrypoint { entrypoint: String }, + + #[error("failed to load policy data")] + LoadData(#[source] anyhow::Error), +} + +/// Holds the entrypoint of each policy +#[derive(Debug, Clone)] +pub struct Entrypoints { + pub register: String, + pub client_registration: String, + pub authorization_grant: String, + 
pub compat_login: String, + pub email: String, +} + +impl Entrypoints { + fn all(&self) -> [&str; 5] { + [ + self.register.as_str(), + self.client_registration.as_str(), + self.authorization_grant.as_str(), + self.compat_login.as_str(), + self.email.as_str(), + ] + } +} + +#[derive(Debug)] +pub struct Data { + base: BaseData, + + // We will merge this in a custom way, so don't emit as part of the base + rest: Option, +} + +#[derive(Serialize, Debug)] +struct BaseData { + server_name: String, + + /// Limits on the number of application sessions that each user can have + session_limit: Option, +} + +impl Data { + #[must_use] + pub fn new(server_name: String, session_limit: Option) -> Self { + Self { + base: BaseData { + server_name, + session_limit, + }, + + rest: None, + } + } + + #[must_use] + pub fn with_rest(mut self, rest: serde_json::Value) -> Self { + self.rest = Some(rest); + self + } + + fn to_value(&self) -> Result { + let base = serde_json::to_value(&self.base)?; + + if let Some(rest) = &self.rest { + merge_data(base, rest.clone()) + } else { + Ok(base) + } + } +} + +fn value_kind(value: &serde_json::Value) -> &'static str { + match value { + serde_json::Value::Object(_) => "object", + serde_json::Value::Array(_) => "array", + serde_json::Value::String(_) => "string", + serde_json::Value::Number(_) => "number", + serde_json::Value::Bool(_) => "boolean", + serde_json::Value::Null => "null", + } +} + +fn merge_data( + mut left: serde_json::Value, + right: serde_json::Value, +) -> Result { + merge_data_rec(&mut left, right)?; + Ok(left) +} + +fn merge_data_rec( + left: &mut serde_json::Value, + right: serde_json::Value, +) -> Result<(), anyhow::Error> { + match (left, right) { + (serde_json::Value::Object(left), serde_json::Value::Object(right)) => { + for (key, value) in right { + if let Some(left_value) = left.get_mut(&key) { + merge_data_rec(left_value, value)?; + } else { + left.insert(key, value); + } + } + } + (serde_json::Value::Array(left), 
serde_json::Value::Array(right)) => { + left.extend(right); + } + // Other values override + (serde_json::Value::Number(left), serde_json::Value::Number(right)) => { + *left = right; + } + (serde_json::Value::Bool(left), serde_json::Value::Bool(right)) => { + *left = right; + } + (serde_json::Value::String(left), serde_json::Value::String(right)) => { + *left = right; + } + + // Null gets overridden by anything + (left, right) if left.is_null() => *left = right, + + // Null on the right makes the left value null + (left, right) if right.is_null() => *left = right, + + (left, right) => anyhow::bail!( + "Cannot merge a {} into a {}", + value_kind(&right), + value_kind(left), + ), + } + + Ok(()) +} + +struct DynamicData { + version: Option, + merged: serde_json::Value, +} + +pub struct PolicyFactory { + engine: Engine, + module: Module, + data: Data, + dynamic_data: ArcSwap, + entrypoints: Entrypoints, +} + +impl PolicyFactory { + /// Load the policy from the given data source. + /// + /// # Errors + /// + /// Returns an error if the policy can't be loaded or instantiated. + #[tracing::instrument(name = "policy.load", skip(source))] + pub async fn load( + mut source: impl AsyncRead + std::marker::Unpin, + data: Data, + entrypoints: Entrypoints, + ) -> Result { + let mut config = Config::default(); + config.async_support(true); + config.cranelift_opt_level(OptLevel::SpeedAndSize); + + let engine = Engine::new(&config).map_err(LoadError::Engine)?; + + // Read and compile the module + let mut buf = Vec::new(); + source.read_to_end(&mut buf).await?; + // Compilation is CPU-bound, so spawn that in a blocking task + let (engine, module) = tokio::task::spawn_blocking(move || { + let module = Module::new(&engine, buf)?; + anyhow::Ok((engine, module)) + }) + .await? 
+ .map_err(LoadError::Compilation)?; + + let merged = data.to_value().map_err(LoadError::InvalidData)?; + let dynamic_data = ArcSwap::new(Arc::new(DynamicData { + version: None, + merged, + })); + + let factory = Self { + engine, + module, + data, + dynamic_data, + entrypoints, + }; + + // Try to instantiate + factory + .instantiate() + .await + .map_err(LoadError::Instantiate)?; + + Ok(factory) + } + + /// Set the dynamic data for the policy. + /// + /// The `dynamic_data` object is merged with the static data given when the + /// policy was loaded. + /// + /// Returns `true` if the data was updated, `false` if the version + /// of the dynamic data was the same as the one we already have. + /// + /// # Errors + /// + /// Returns an error if the data can't be merged with the static data, or if + /// the policy can't be instantiated with the new data. + pub async fn set_dynamic_data( + &self, + dynamic_data: mas_data_model::PolicyData, + ) -> Result { + // Check if the version of the dynamic data we have is the same as the one we're + // trying to set + if self.dynamic_data.load().version == Some(dynamic_data.id) { + // Don't do anything if the version is the same + return Ok(false); + } + + let static_data = self.data.to_value().map_err(LoadError::InvalidData)?; + let merged = merge_data(static_data, dynamic_data.data).map_err(LoadError::InvalidData)?; + + // Try to instantiate with the new data + self.instantiate_with_data(&merged) + .await + .map_err(LoadError::Instantiate)?; + + // If instantiation succeeds, swap the data + self.dynamic_data.store(Arc::new(DynamicData { + version: Some(dynamic_data.id), + merged, + })); + + Ok(true) + } + + /// Create a new policy instance. + /// + /// # Errors + /// + /// Returns an error if the policy can't be instantiated with the current + /// dynamic data. 
+ #[tracing::instrument(name = "policy.instantiate", skip_all)] + pub async fn instantiate(&self) -> Result { + let data = self.dynamic_data.load(); + self.instantiate_with_data(&data.merged).await + } + + async fn instantiate_with_data( + &self, + data: &serde_json::Value, + ) -> Result { + let mut store = Store::new(&self.engine, ()); + let runtime = Runtime::new(&mut store, &self.module) + .await + .map_err(InstantiateError::Runtime)?; + + // Check that we have the required entrypoints + let policy_entrypoints = runtime.entrypoints(); + + for e in self.entrypoints.all() { + if !policy_entrypoints.contains(e) { + return Err(InstantiateError::MissingEntrypoint { + entrypoint: e.to_owned(), + }); + } + } + + let instance = runtime + .with_data(&mut store, data) + .await + .map_err(InstantiateError::LoadData)?; + + Ok(Policy { + store, + instance, + entrypoints: self.entrypoints.clone(), + }) + } +} + +pub struct Policy { + store: Store<()>, + instance: opa_wasm::Policy, + entrypoints: Entrypoints, +} + +#[derive(Debug, Error)] +#[error("failed to evaluate policy")] +pub enum EvaluationError { + Serialization(#[from] serde_json::Error), + Evaluation(#[from] anyhow::Error), +} + +impl Policy { + /// Evaluate the 'email' entrypoint. + /// + /// # Errors + /// + /// Returns an error if the policy engine fails to evaluate the entrypoint. + #[tracing::instrument( + name = "policy.evaluate_email", + skip_all, + fields( + %input.email, + ), + )] + pub async fn evaluate_email( + &mut self, + input: EmailInput<'_>, + ) -> Result { + let [res]: [EvaluationResult; 1] = self + .instance + .evaluate(&mut self.store, &self.entrypoints.email, &input) + .await?; + + Ok(res) + } + + /// Evaluate the 'register' entrypoint. + /// + /// # Errors + /// + /// Returns an error if the policy engine fails to evaluate the entrypoint. 
+ #[tracing::instrument( + name = "policy.evaluate.register", + skip_all, + fields( + ?input.registration_method, + input.username = input.username, + input.email = input.email, + ), + )] + pub async fn evaluate_register( + &mut self, + input: RegisterInput<'_>, + ) -> Result { + let [res]: [EvaluationResult; 1] = self + .instance + .evaluate(&mut self.store, &self.entrypoints.register, &input) + .await?; + + Ok(res) + } + + /// Evaluate the `client_registration` entrypoint. + /// + /// # Errors + /// + /// Returns an error if the policy engine fails to evaluate the entrypoint. + #[tracing::instrument(skip(self))] + pub async fn evaluate_client_registration( + &mut self, + input: ClientRegistrationInput<'_>, + ) -> Result { + let [res]: [EvaluationResult; 1] = self + .instance + .evaluate( + &mut self.store, + &self.entrypoints.client_registration, + &input, + ) + .await?; + + Ok(res) + } + + /// Evaluate the `authorization_grant` entrypoint. + /// + /// # Errors + /// + /// Returns an error if the policy engine fails to evaluate the entrypoint. + #[tracing::instrument( + name = "policy.evaluate.authorization_grant", + skip_all, + fields( + %input.scope, + %input.client.id, + ), + )] + pub async fn evaluate_authorization_grant( + &mut self, + input: AuthorizationGrantInput<'_>, + ) -> Result { + let [res]: [EvaluationResult; 1] = self + .instance + .evaluate( + &mut self.store, + &self.entrypoints.authorization_grant, + &input, + ) + .await?; + + Ok(res) + } + + /// Evaluate the `compat_login` entrypoint. + /// + /// # Errors + /// + /// Returns an error if the policy engine fails to evaluate the entrypoint. 
+ #[tracing::instrument( + name = "policy.evaluate.compat_login", + skip_all, + fields( + %input.user.id, + ), + )] + pub async fn evaluate_compat_login( + &mut self, + input: CompatLoginInput<'_>, + ) -> Result { + let [res]: [EvaluationResult; 1] = self + .instance + .evaluate(&mut self.store, &self.entrypoints.compat_login, &input) + .await?; + + Ok(res) + } +} + +#[cfg(test)] +mod tests { + + use std::time::SystemTime; + + use super::*; + + fn make_entrypoints() -> Entrypoints { + Entrypoints { + register: "register/violation".to_owned(), + client_registration: "client_registration/violation".to_owned(), + authorization_grant: "authorization_grant/violation".to_owned(), + compat_login: "compat_login/violation".to_owned(), + email: "email/violation".to_owned(), + } + } + + #[tokio::test] + async fn test_register() { + let data = Data::new("example.com".to_owned(), None).with_rest(serde_json::json!({ + "allowed_domains": ["element.io", "*.element.io"], + "banned_domains": ["staging.element.io"], + })); + + #[allow(clippy::disallowed_types)] + let path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join("..") + .join("policies") + .join("policy.wasm"); + + let file = tokio::fs::File::open(path).await.unwrap(); + + let factory = PolicyFactory::load(file, data, make_entrypoints()) + .await + .unwrap(); + + let mut policy = factory.instantiate().await.unwrap(); + + let res = policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("hello@example.com"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(!res.valid()); + + let res = policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("hello@foo.element.io"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(res.valid()); + + let res = 
policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("hello@staging.element.io"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(!res.valid()); + } + + #[tokio::test] + async fn test_dynamic_data() { + let data = Data::new("example.com".to_owned(), None); + + #[allow(clippy::disallowed_types)] + let path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join("..") + .join("policies") + .join("policy.wasm"); + + let file = tokio::fs::File::open(path).await.unwrap(); + + let factory = PolicyFactory::load(file, data, make_entrypoints()) + .await + .unwrap(); + + let mut policy = factory.instantiate().await.unwrap(); + + let res = policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("hello@example.com"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(res.valid()); + + // Update the policy data + factory + .set_dynamic_data(mas_data_model::PolicyData { + id: Ulid::nil(), + created_at: SystemTime::now().into(), + data: serde_json::json!({ + "emails": { + "banned_addresses": { + "substrings": ["hello"] + } + } + }), + }) + .await + .unwrap(); + let mut policy = factory.instantiate().await.unwrap(); + let res = policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("hello@example.com"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(!res.valid()); + } + + #[tokio::test] + async fn test_big_dynamic_data() { + let data = Data::new("example.com".to_owned(), None); + + #[allow(clippy::disallowed_types)] + let path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join("..") + .join("policies") + .join("policy.wasm"); + + let file = 
tokio::fs::File::open(path).await.unwrap(); + + let factory = PolicyFactory::load(file, data, make_entrypoints()) + .await + .unwrap(); + + // That is around 1 MB of JSON data. Each element is a 5-digit string, so 8 + // characters including the quotes and a comma. + let data: Vec = (0..(1024 * 1024 / 8)) + .map(|i| format!("{:05}", i % 100_000)) + .collect(); + let json = serde_json::json!({ "emails": { "banned_addresses": { "substrings": data } } }); + factory + .set_dynamic_data(mas_data_model::PolicyData { + id: Ulid::nil(), + created_at: SystemTime::now().into(), + data: json, + }) + .await + .unwrap(); + + // Try instantiating the policy, make sure 5-digit numbers are banned from email + // addresses + let mut policy = factory.instantiate().await.unwrap(); + let res = policy + .evaluate_register(RegisterInput { + registration_method: RegistrationMethod::Password, + username: "hello", + email: Some("12345@example.com"), + requester: Requester { + ip_address: None, + user_agent: None, + }, + }) + .await + .unwrap(); + assert!(!res.valid()); + } + + #[test] + fn test_merge() { + use serde_json::json as j; + + // Merging objects + let res = merge_data(j!({"hello": "world"}), j!({"foo": "bar"})).unwrap(); + assert_eq!(res, j!({"hello": "world", "foo": "bar"})); + + // Override a value of the same type + let res = merge_data(j!({"hello": "world"}), j!({"hello": "john"})).unwrap(); + assert_eq!(res, j!({"hello": "john"})); + + let res = merge_data(j!({"hello": true}), j!({"hello": false})).unwrap(); + assert_eq!(res, j!({"hello": false})); + + let res = merge_data(j!({"hello": 0}), j!({"hello": 42})).unwrap(); + assert_eq!(res, j!({"hello": 42})); + + // Override a value of a different type + merge_data(j!({"hello": "world"}), j!({"hello": 123})) + .expect_err("Can't merge different types"); + + // Merge arrays + let res = merge_data(j!({"hello": ["world"]}), j!({"hello": ["john"]})).unwrap(); + assert_eq!(res, j!({"hello": ["world", "john"]})); + + // Null 
overrides a value + let res = merge_data(j!({"hello": "world"}), j!({"hello": null})).unwrap(); + assert_eq!(res, j!({"hello": null})); + + // Null gets overridden by a value + let res = merge_data(j!({"hello": null}), j!({"hello": "world"})).unwrap(); + assert_eq!(res, j!({"hello": "world"})); + + // Objects get deeply merged + let res = merge_data(j!({"a": {"b": {"c": "d"}}}), j!({"a": {"b": {"e": "f"}}})).unwrap(); + assert_eq!(res, j!({"a": {"b": {"c": "d", "e": "f"}}})); + } +} diff --git a/matrix-authentication-service/crates/policy/src/model.rs b/matrix-authentication-service/crates/policy/src/model.rs new file mode 100644 index 00000000..a9f5fb50 --- /dev/null +++ b/matrix-authentication-service/crates/policy/src/model.rs @@ -0,0 +1,243 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Input and output types for policy evaluation. +//! +//! This is useful to generate JSON schemas for each input type, which can then +//! be type-checked by Open Policy Agent. + +use std::net::IpAddr; + +use mas_data_model::{Client, User}; +use oauth2_types::{registration::VerifiedClientMetadata, scope::Scope}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +/// A well-known policy code. +#[derive(Serialize, Deserialize, Debug, Clone, Copy, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub enum Code { + /// The username is too short. + UsernameTooShort, + + /// The username is too long. + UsernameTooLong, + + /// The username contains invalid characters. + UsernameInvalidChars, + + /// The username contains only numeric characters. + UsernameAllNumeric, + + /// The username is banned. + UsernameBanned, + + /// The username is not allowed. + UsernameNotAllowed, + + /// The email domain is not allowed. 
+ EmailDomainNotAllowed, + + /// The email domain is banned. + EmailDomainBanned, + + /// The email address is not allowed. + EmailNotAllowed, + + /// The email address is banned. + EmailBanned, + + /// The user has reached their session limit. + TooManySessions, +} + +impl Code { + /// Returns the code as a string + #[must_use] + pub fn as_str(&self) -> &'static str { + match self { + Self::UsernameTooShort => "username-too-short", + Self::UsernameTooLong => "username-too-long", + Self::UsernameInvalidChars => "username-invalid-chars", + Self::UsernameAllNumeric => "username-all-numeric", + Self::UsernameBanned => "username-banned", + Self::UsernameNotAllowed => "username-not-allowed", + Self::EmailDomainNotAllowed => "email-domain-not-allowed", + Self::EmailDomainBanned => "email-domain-banned", + Self::EmailNotAllowed => "email-not-allowed", + Self::EmailBanned => "email-banned", + Self::TooManySessions => "too-many-sessions", + } + } +} + +/// A single violation of a policy. +#[derive(Serialize, Deserialize, Debug, JsonSchema)] +pub struct Violation { + pub msg: String, + pub redirect_uri: Option, + pub field: Option, + pub code: Option, +} + +/// The result of a policy evaluation. +#[derive(Deserialize, Debug)] +pub struct EvaluationResult { + #[serde(rename = "result")] + pub violations: Vec, +} + +impl std::fmt::Display for EvaluationResult { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut first = true; + for violation in &self.violations { + if first { + first = false; + } else { + write!(f, ", ")?; + } + write!(f, "{}", violation.msg)?; + } + Ok(()) + } +} + +impl EvaluationResult { + /// Returns true if the policy evaluation was successful. 
+ #[must_use] + pub fn valid(&self) -> bool { + self.violations.is_empty() + } +} + +/// Identity of the requester +#[derive(Serialize, Debug, Default, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct Requester { + /// IP address of the entity making the request + pub ip_address: Option, + + /// User agent of the entity making the request + pub user_agent: Option, +} + +#[derive(Serialize, Debug, JsonSchema)] +pub enum RegistrationMethod { + #[serde(rename = "password")] + Password, + + #[serde(rename = "upstream-oauth2")] + UpstreamOAuth2, +} + +/// Input for the user registration policy. +#[derive(Serialize, Debug, JsonSchema)] +#[serde(tag = "registration_method")] +pub struct RegisterInput<'a> { + pub registration_method: RegistrationMethod, + + pub username: &'a str, + + #[serde(skip_serializing_if = "Option::is_none")] + pub email: Option<&'a str>, + + pub requester: Requester, +} + +/// Input for the client registration policy. +#[derive(Serialize, Debug, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct ClientRegistrationInput<'a> { + #[schemars(with = "std::collections::HashMap")] + pub client_metadata: &'a VerifiedClientMetadata, + pub requester: Requester, +} + +#[derive(Serialize, Debug, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum GrantType { + AuthorizationCode, + ClientCredentials, + #[serde(rename = "urn:ietf:params:oauth:grant-type:device_code")] + DeviceCode, +} + +/// Input for the authorization grant policy. +#[derive(Serialize, Debug, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct AuthorizationGrantInput<'a> { + #[schemars(with = "Option>")] + pub user: Option<&'a User>, + + /// How many sessions the user has. + /// Not populated if it's not a user logging in. 
+ pub session_counts: Option, + + #[schemars(with = "std::collections::HashMap")] + pub client: &'a Client, + + #[schemars(with = "String")] + pub scope: &'a Scope, + + pub grant_type: GrantType, + + pub requester: Requester, +} + +/// Input for the compatibility login policy. +#[derive(Serialize, Debug, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct CompatLoginInput<'a> { + #[schemars(with = "std::collections::HashMap")] + pub user: &'a User, + + /// How many sessions the user has. + pub session_counts: SessionCounts, + + /// Whether a session will be replaced by this login + pub session_replaced: bool, + + /// What type of login is being performed. + /// This also determines whether the login is interactive. + pub login: CompatLogin, + + pub requester: Requester, +} + +#[derive(Serialize, Debug, JsonSchema)] +#[serde(tag = "type")] +pub enum CompatLogin { + /// Used as the interactive part of SSO login. + #[serde(rename = "m.login.sso")] + Sso { redirect_uri: String }, + + /// Used as the final (non-interactive) stage of SSO login. + #[serde(rename = "m.login.token")] + Token, + + /// Non-interactive password-over-the-API login. + #[serde(rename = "m.login.password")] + Password, +} + +/// Information about how many sessions the user has +#[derive(Serialize, Debug, JsonSchema)] +pub struct SessionCounts { + pub total: u64, + + pub oauth2: u64, + pub compat: u64, + pub personal: u64, +} + +/// Input for the email add policy. +#[derive(Serialize, Debug, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct EmailInput<'a> { + pub email: &'a str, + + pub requester: Requester, +} diff --git a/matrix-authentication-service/crates/router/Cargo.toml b/matrix-authentication-service/crates/router/Cargo.toml new file mode 100644 index 00000000..07cd3a91 --- /dev/null +++ b/matrix-authentication-service/crates/router/Cargo.toml @@ -0,0 +1,24 @@ +# Copyright 2025 New Vector Ltd. 
+# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-router" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +axum.workspace = true +serde_urlencoded.workspace = true +serde.workspace = true +ulid.workspace = true +url.workspace = true diff --git a/matrix-authentication-service/crates/router/src/endpoints.rs b/matrix-authentication-service/crates/router/src/endpoints.rs new file mode 100644 index 00000000..6aa18f13 --- /dev/null +++ b/matrix-authentication-service/crates/router/src/endpoints.rs @@ -0,0 +1,963 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::UrlBuilder; +pub use crate::traits::*; + +#[derive(Deserialize, Serialize, Clone, Debug)] +#[serde(rename_all = "snake_case", tag = "kind")] +pub enum PostAuthAction { + ContinueAuthorizationGrant { + id: Ulid, + }, + ContinueDeviceCodeGrant { + id: Ulid, + }, + ContinueCompatSsoLogin { + id: Ulid, + }, + ChangePassword, + LinkUpstream { + id: Ulid, + }, + ManageAccount { + #[serde(flatten)] + action: Option, + }, +} + +impl PostAuthAction { + #[must_use] + pub const fn continue_grant(id: Ulid) -> Self { + PostAuthAction::ContinueAuthorizationGrant { id } + } + + #[must_use] + pub const fn continue_device_code_grant(id: Ulid) -> Self { + PostAuthAction::ContinueDeviceCodeGrant { id } + } + + #[must_use] + pub const fn continue_compat_sso_login(id: Ulid) -> Self { + PostAuthAction::ContinueCompatSsoLogin { id } + } + + #[must_use] + pub const fn link_upstream(id: Ulid) -> Self { + PostAuthAction::LinkUpstream { id } + } + + #[must_use] + pub const fn manage_account(action: Option) -> Self { + PostAuthAction::ManageAccount { action } + } + + pub fn go_next(&self, url_builder: &UrlBuilder) -> axum::response::Redirect { + match self { + Self::ContinueAuthorizationGrant { id } => url_builder.redirect(&Consent(*id)), + Self::ContinueDeviceCodeGrant { id } => { + url_builder.redirect(&DeviceCodeConsent::new(*id)) + } + Self::ContinueCompatSsoLogin { id } => { + url_builder.redirect(&CompatLoginSsoComplete::new(*id, None)) + } + Self::ChangePassword => url_builder.redirect(&AccountPasswordChange), + Self::LinkUpstream { id } => url_builder.redirect(&UpstreamOAuth2Link::new(*id)), + Self::ManageAccount { action } => url_builder.redirect(&Account { + action: action.clone(), + }), + } + } +} + +/// `GET /.well-known/openid-configuration` +#[derive(Default, Debug, Clone)] +pub struct OidcConfiguration; + +impl SimpleRoute for OidcConfiguration { + const PATH: &'static str = 
"/.well-known/openid-configuration"; +} + +/// `GET /.well-known/webfinger` +#[derive(Default, Debug, Clone)] +pub struct Webfinger; + +impl SimpleRoute for Webfinger { + const PATH: &'static str = "/.well-known/webfinger"; +} + +/// `GET /.well-known/change-password` +pub struct ChangePasswordDiscovery; + +impl SimpleRoute for ChangePasswordDiscovery { + const PATH: &'static str = "/.well-known/change-password"; +} + +/// `GET /oauth2/keys.json` +#[derive(Default, Debug, Clone)] +pub struct OAuth2Keys; + +impl SimpleRoute for OAuth2Keys { + const PATH: &'static str = "/oauth2/keys.json"; +} + +/// `GET /oauth2/userinfo` +#[derive(Default, Debug, Clone)] +pub struct OidcUserinfo; + +impl SimpleRoute for OidcUserinfo { + const PATH: &'static str = "/oauth2/userinfo"; +} + +/// `POST /oauth2/introspect` +#[derive(Default, Debug, Clone)] +pub struct OAuth2Introspection; + +impl SimpleRoute for OAuth2Introspection { + const PATH: &'static str = "/oauth2/introspect"; +} + +/// `POST /oauth2/revoke` +#[derive(Default, Debug, Clone)] +pub struct OAuth2Revocation; + +impl SimpleRoute for OAuth2Revocation { + const PATH: &'static str = "/oauth2/revoke"; +} + +/// `POST /oauth2/token` +#[derive(Default, Debug, Clone)] +pub struct OAuth2TokenEndpoint; + +impl SimpleRoute for OAuth2TokenEndpoint { + const PATH: &'static str = "/oauth2/token"; +} + +/// `POST /oauth2/registration` +#[derive(Default, Debug, Clone)] +pub struct OAuth2RegistrationEndpoint; + +impl SimpleRoute for OAuth2RegistrationEndpoint { + const PATH: &'static str = "/oauth2/registration"; +} + +/// `GET /authorize` +#[derive(Default, Debug, Clone)] +pub struct OAuth2AuthorizationEndpoint; + +impl SimpleRoute for OAuth2AuthorizationEndpoint { + const PATH: &'static str = "/authorize"; +} + +/// `GET /` +#[derive(Default, Debug, Clone)] +pub struct Index; + +impl SimpleRoute for Index { + const PATH: &'static str = "/"; +} + +/// `GET /health` +#[derive(Default, Debug, Clone)] +pub struct Healthcheck; + +impl 
SimpleRoute for Healthcheck { + const PATH: &'static str = "/health"; +} + +/// `GET|POST /login` +#[derive(Default, Debug, Clone)] +pub struct Login { + post_auth_action: Option, +} + +impl Route for Login { + type Query = PostAuthAction; + + fn route() -> &'static str { + "/login" + } + + fn query(&self) -> Option<&Self::Query> { + self.post_auth_action.as_ref() + } +} + +impl Login { + #[must_use] + pub const fn and_then(action: PostAuthAction) -> Self { + Self { + post_auth_action: Some(action), + } + } + + #[must_use] + pub const fn and_continue_grant(id: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::continue_grant(id)), + } + } + + #[must_use] + pub const fn and_continue_device_code_grant(id: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::continue_device_code_grant(id)), + } + } + + #[must_use] + pub const fn and_continue_compat_sso_login(id: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::continue_compat_sso_login(id)), + } + } + + #[must_use] + pub const fn and_link_upstream(id: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::link_upstream(id)), + } + } + + /// Get a reference to the login's post auth action. 
+ #[must_use] + pub fn post_auth_action(&self) -> Option<&PostAuthAction> { + self.post_auth_action.as_ref() + } + + pub fn go_next(&self, url_builder: &UrlBuilder) -> axum::response::Redirect { + match &self.post_auth_action { + Some(action) => action.go_next(url_builder), + None => url_builder.redirect(&Index), + } + } +} + +impl From> for Login { + fn from(post_auth_action: Option) -> Self { + Self { post_auth_action } + } +} + +/// `POST /logout` +#[derive(Default, Debug, Clone)] +pub struct Logout; + +impl SimpleRoute for Logout { + const PATH: &'static str = "/logout"; +} + +/// `POST /register` +#[derive(Default, Debug, Clone)] +pub struct Register { + post_auth_action: Option, +} + +impl Register { + #[must_use] + pub fn and_then(action: PostAuthAction) -> Self { + Self { + post_auth_action: Some(action), + } + } + + #[must_use] + pub fn and_continue_grant(data: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::continue_grant(data)), + } + } + + #[must_use] + pub fn and_continue_compat_sso_login(data: Ulid) -> Self { + Self { + post_auth_action: Some(PostAuthAction::continue_compat_sso_login(data)), + } + } + + /// Get a reference to the reauth's post auth action. 
+ #[must_use] + pub fn post_auth_action(&self) -> Option<&PostAuthAction> { + self.post_auth_action.as_ref() + } + + pub fn go_next(&self, url_builder: &UrlBuilder) -> axum::response::Redirect { + match &self.post_auth_action { + Some(action) => action.go_next(url_builder), + None => url_builder.redirect(&Index), + } + } +} + +impl Route for Register { + type Query = PostAuthAction; + + fn route() -> &'static str { + "/register" + } + + fn query(&self) -> Option<&Self::Query> { + self.post_auth_action.as_ref() + } +} + +impl From> for Register { + fn from(post_auth_action: Option) -> Self { + Self { post_auth_action } + } +} + +/// `GET|POST /register/password` +#[derive(Default, Debug, Clone, Serialize, Deserialize)] +pub struct PasswordRegister { + username: Option, + + #[serde(flatten)] + post_auth_action: Option, +} + +impl PasswordRegister { + #[must_use] + pub fn and_then(mut self, action: PostAuthAction) -> Self { + self.post_auth_action = Some(action); + self + } + + #[must_use] + pub fn and_continue_grant(mut self, data: Ulid) -> Self { + self.post_auth_action = Some(PostAuthAction::continue_grant(data)); + self + } + + #[must_use] + pub fn and_continue_compat_sso_login(mut self, data: Ulid) -> Self { + self.post_auth_action = Some(PostAuthAction::continue_compat_sso_login(data)); + self + } + + /// Get a reference to the post auth action. + #[must_use] + pub fn post_auth_action(&self) -> Option<&PostAuthAction> { + self.post_auth_action.as_ref() + } + + /// Get a reference to the username chosen by the user. 
+ #[must_use] + pub fn username(&self) -> Option<&str> { + self.username.as_deref() + } + + pub fn go_next(&self, url_builder: &UrlBuilder) -> axum::response::Redirect { + match &self.post_auth_action { + Some(action) => action.go_next(url_builder), + None => url_builder.redirect(&Index), + } + } +} + +impl Route for PasswordRegister { + type Query = Self; + + fn route() -> &'static str { + "/register/password" + } + + fn query(&self) -> Option<&Self::Query> { + Some(self) + } +} + +impl From> for PasswordRegister { + fn from(post_auth_action: Option) -> Self { + Self { + username: None, + post_auth_action, + } + } +} + +/// `GET|POST /register/steps/{id}/token` +#[derive(Debug, Clone)] +pub struct RegisterToken { + id: Ulid, +} + +impl RegisterToken { + #[must_use] + pub fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for RegisterToken { + type Query = (); + fn route() -> &'static str { + "/register/steps/{id}/token" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/register/steps/{}/token", self.id).into() + } +} + +/// `GET|POST /register/steps/{id}/display-name` +#[derive(Debug, Clone)] +pub struct RegisterDisplayName { + id: Ulid, +} + +impl RegisterDisplayName { + #[must_use] + pub fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for RegisterDisplayName { + type Query = (); + fn route() -> &'static str { + "/register/steps/{id}/display-name" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/register/steps/{}/display-name", self.id).into() + } +} + +/// `GET|POST /register/steps/{id}/verify-email` +#[derive(Debug, Clone)] +pub struct RegisterVerifyEmail { + id: Ulid, +} + +impl RegisterVerifyEmail { + #[must_use] + pub fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for RegisterVerifyEmail { + type Query = (); + fn route() -> &'static str { + "/register/steps/{id}/verify-email" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + 
format!("/register/steps/{}/verify-email", self.id).into() + } +} + +/// `GET /register/steps/{id}/finish` +#[derive(Debug, Clone)] +pub struct RegisterFinish { + id: Ulid, +} + +impl RegisterFinish { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for RegisterFinish { + type Query = (); + fn route() -> &'static str { + "/register/steps/{id}/finish" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/register/steps/{}/finish", self.id).into() + } +} + +/// Actions parameters as defined by MSC4191 +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "action")] +pub enum AccountAction { + #[serde(rename = "org.matrix.profile")] + OrgMatrixProfile, + /// DEPRECATED: Use `OrgMatrixProfile` instead + #[serde(rename = "profile")] + Profile, + + #[serde(rename = "org.matrix.devices_list")] + OrgMatrixDevicesList, + /// DEPRECATED: Use `OrgMatrixDevicesList` instead + #[serde(rename = "org.matrix.sessions_list")] + OrgMatrixSessionsList, + /// DEPRECATED: Use `OrgMatrixDevicesList` instead + #[serde(rename = "sessions_list")] + SessionsList, + + #[serde(rename = "org.matrix.device_view")] + OrgMatrixDeviceView { device_id: String }, + /// DEPRECATED: Use `OrgMatrixDeviceView` instead + #[serde(rename = "org.matrix.session_view")] + OrgMatrixSessionView { device_id: String }, + /// DEPRECATED: Use `OrgMatrixDeviceView` instead + #[serde(rename = "session_view")] + SessionView { device_id: String }, + + #[serde(rename = "org.matrix.device_delete")] + OrgMatrixDeviceDelete { device_id: String }, + /// DEPRECATED: Use `OrgMatrixDeviceDelete` instead + #[serde(rename = "org.matrix.session_end")] + OrgMatrixSessionEnd { device_id: String }, + /// DEPRECATED: Use `OrgMatrixDeviceDelete` instead + #[serde(rename = "session_end")] + SessionEnd { device_id: String }, + + #[serde(rename = "org.matrix.cross_signing_reset")] + OrgMatrixCrossSigningReset, +} + +/// `GET /account/` +#[derive(Default, Debug, Clone)] 
+pub struct Account { + action: Option, +} + +impl Route for Account { + type Query = AccountAction; + + fn route() -> &'static str { + "/account/" + } + + fn query(&self) -> Option<&Self::Query> { + self.action.as_ref() + } +} + +/// `GET /account/*` +#[derive(Default, Debug, Clone)] +pub struct AccountWildcard; + +impl SimpleRoute for AccountWildcard { + const PATH: &'static str = "/account/{*rest}"; +} + +/// `GET /account/password/change` +/// +/// Handled by the React frontend; this struct definition is purely for +/// redirects. +#[derive(Default, Debug, Clone)] +pub struct AccountPasswordChange; + +impl SimpleRoute for AccountPasswordChange { + const PATH: &'static str = "/account/password/change"; +} + +/// `GET /consent/{grant_id}` +#[derive(Debug, Clone)] +pub struct Consent(pub Ulid); + +impl Route for Consent { + type Query = (); + fn route() -> &'static str { + "/consent/{grant_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/consent/{}", self.0).into() + } +} + +/// `GET|POST /_matrix/client/v3/login` +pub struct CompatLogin; + +impl SimpleRoute for CompatLogin { + const PATH: &'static str = "/_matrix/client/{version}/login"; +} + +/// `POST /_matrix/client/v3/logout` +pub struct CompatLogout; + +impl SimpleRoute for CompatLogout { + const PATH: &'static str = "/_matrix/client/{version}/logout"; +} + +/// `POST /_matrix/client/v3/logout/all` +pub struct CompatLogoutAll; + +impl SimpleRoute for CompatLogoutAll { + const PATH: &'static str = "/_matrix/client/{version}/logout/all"; +} + +/// `POST /_matrix/client/v3/refresh` +pub struct CompatRefresh; + +impl SimpleRoute for CompatRefresh { + const PATH: &'static str = "/_matrix/client/{version}/refresh"; +} + +/// `GET /_matrix/client/v3/login/sso/redirect` +pub struct CompatLoginSsoRedirect; + +impl SimpleRoute for CompatLoginSsoRedirect { + const PATH: &'static str = "/_matrix/client/{version}/login/sso/redirect"; +} + +/// `GET /_matrix/client/v3/login/sso/redirect/` +/// 
/// This is a workaround for the fact some clients (Element iOS) sends a
/// trailing slash, even though it's not in the spec.
pub struct CompatLoginSsoRedirectSlash;

impl SimpleRoute for CompatLoginSsoRedirectSlash {
    const PATH: &'static str = "/_matrix/client/{version}/login/sso/redirect/";
}

/// `GET /_matrix/client/v3/login/sso/redirect/{idp}`
pub struct CompatLoginSsoRedirectIdp;

impl SimpleRoute for CompatLoginSsoRedirectIdp {
    const PATH: &'static str = "/_matrix/client/{version}/login/sso/redirect/{idp}";
}

/// The `action` requested by a client starting an SSO flow.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
#[serde(rename_all = "lowercase")]
pub enum CompatLoginSsoAction {
    /// The client wants to log an existing user in
    Login,
    /// The client wants to register a new user
    Register,
    /// Catch-all for any unrecognised action value, via `#[serde(other)]`
    #[serde(other)]
    Unknown,
}

impl CompatLoginSsoAction {
    /// Returns true if the action is a known action.
    #[must_use]
    pub fn is_known(&self) -> bool {
        !matches!(self, Self::Unknown)
    }
}

/// Query parameters carrying the SSO action under both its stable and its
/// unstable (MSC3824) name, so that either generation of client is served.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub struct CompatLoginSsoActionParams {
    action: CompatLoginSsoAction,
    /// DEPRECATED: Use `action` instead. We will remove this once enough
    /// clients support the stable name.
+ #[serde(rename = "org.matrix.msc3824.action")] + unstable_action: CompatLoginSsoAction, +} + +/// `GET|POST /complete-compat-sso/{id}` +pub struct CompatLoginSsoComplete { + id: Ulid, + query: Option, +} + +impl CompatLoginSsoComplete { + #[must_use] + pub fn new(id: Ulid, action: Option) -> Self { + Self { + id, + query: action.map(|action| CompatLoginSsoActionParams { + action, + unstable_action: action, + }), + } + } +} + +impl Route for CompatLoginSsoComplete { + type Query = CompatLoginSsoActionParams; + + fn query(&self) -> Option<&Self::Query> { + self.query.as_ref() + } + + fn route() -> &'static str { + "/complete-compat-sso/{grant_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/complete-compat-sso/{}", self.id).into() + } +} + +/// `GET /upstream/authorize/{id}` +pub struct UpstreamOAuth2Authorize { + id: Ulid, + post_auth_action: Option, +} + +impl UpstreamOAuth2Authorize { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { + id, + post_auth_action: None, + } + } + + #[must_use] + pub fn and_then(mut self, action: PostAuthAction) -> Self { + self.post_auth_action = Some(action); + self + } +} + +impl Route for UpstreamOAuth2Authorize { + type Query = PostAuthAction; + fn route() -> &'static str { + "/upstream/authorize/{provider_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/upstream/authorize/{}", self.id).into() + } + + fn query(&self) -> Option<&Self::Query> { + self.post_auth_action.as_ref() + } +} + +/// `GET /upstream/callback/{id}` +pub struct UpstreamOAuth2Callback { + id: Ulid, +} + +impl UpstreamOAuth2Callback { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for UpstreamOAuth2Callback { + type Query = (); + fn route() -> &'static str { + "/upstream/callback/{provider_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/upstream/callback/{}", self.id).into() + } +} + +/// `GET /upstream/link/{id}` +pub struct 
UpstreamOAuth2Link { + id: Ulid, +} + +impl UpstreamOAuth2Link { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for UpstreamOAuth2Link { + type Query = (); + fn route() -> &'static str { + "/upstream/link/{link_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/upstream/link/{}", self.id).into() + } +} + +/// `POST /upstream/backchannel-logout/{id}` +pub struct UpstreamOAuth2BackchannelLogout { + id: Ulid, +} + +impl UpstreamOAuth2BackchannelLogout { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for UpstreamOAuth2BackchannelLogout { + type Query = (); + fn route() -> &'static str { + "/upstream/backchannel-logout/{provider_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/upstream/backchannel-logout/{}", self.id).into() + } +} + +/// `GET|POST /link` +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct DeviceCodeLink { + code: Option, +} + +impl DeviceCodeLink { + #[must_use] + pub fn with_code(code: String) -> Self { + Self { code: Some(code) } + } +} + +impl Route for DeviceCodeLink { + type Query = DeviceCodeLink; + fn route() -> &'static str { + "/link" + } + + fn query(&self) -> Option<&Self::Query> { + Some(self) + } +} + +/// `GET|POST /device/{device_code_id}` +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct DeviceCodeConsent { + id: Ulid, +} + +impl Route for DeviceCodeConsent { + type Query = (); + fn route() -> &'static str { + "/device/{device_code_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/device/{}", self.id).into() + } +} + +impl DeviceCodeConsent { + #[must_use] + pub fn new(id: Ulid) -> Self { + Self { id } + } +} + +/// `POST /oauth2/device` +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct OAuth2DeviceAuthorizationEndpoint; + +impl SimpleRoute for OAuth2DeviceAuthorizationEndpoint { + const PATH: &'static str = 
"/oauth2/device"; +} + +/// `GET|POST /recover` +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct AccountRecoveryStart; + +impl SimpleRoute for AccountRecoveryStart { + const PATH: &'static str = "/recover"; +} + +/// `GET|POST /recover/progress/{session_id}` +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct AccountRecoveryProgress { + session_id: Ulid, +} + +impl AccountRecoveryProgress { + #[must_use] + pub fn new(session_id: Ulid) -> Self { + Self { session_id } + } +} + +impl Route for AccountRecoveryProgress { + type Query = (); + fn route() -> &'static str { + "/recover/progress/{session_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/recover/progress/{}", self.session_id).into() + } +} + +/// `GET /account/password/recovery?ticket=:ticket` +/// Rendered by the React frontend +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +pub struct AccountRecoveryFinish { + ticket: String, +} + +impl AccountRecoveryFinish { + #[must_use] + pub fn new(ticket: String) -> Self { + Self { ticket } + } +} + +impl Route for AccountRecoveryFinish { + type Query = AccountRecoveryFinish; + + fn route() -> &'static str { + "/account/password/recovery" + } + + fn query(&self) -> Option<&Self::Query> { + Some(self) + } +} + +/// `GET /assets` +pub struct StaticAsset { + path: String, +} + +impl StaticAsset { + #[must_use] + pub fn new(path: String) -> Self { + Self { path } + } +} + +impl Route for StaticAsset { + type Query = (); + fn route() -> &'static str { + "/assets/" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/assets/{}", self.path).into() + } +} + +/// `GET|POST /graphql` +pub struct GraphQL; + +impl SimpleRoute for GraphQL { + const PATH: &'static str = "/graphql"; +} + +/// `GET /graphql/playground` +pub struct GraphQLPlayground; + +impl SimpleRoute for GraphQLPlayground { + const PATH: &'static str = "/graphql/playground"; +} + +/// `GET /api/spec.json` +pub struct 
ApiSpec; + +impl SimpleRoute for ApiSpec { + const PATH: &'static str = "/api/spec.json"; +} + +/// `GET /api/doc/` +pub struct ApiDoc; + +impl SimpleRoute for ApiDoc { + const PATH: &'static str = "/api/doc/"; +} + +/// `GET /api/doc/oauth2-callback` +pub struct ApiDocCallback; + +impl SimpleRoute for ApiDocCallback { + const PATH: &'static str = "/api/doc/oauth2-callback"; +} diff --git a/matrix-authentication-service/crates/router/src/lib.rs b/matrix-authentication-service/crates/router/src/lib.rs new file mode 100644 index 00000000..0308fdfc --- /dev/null +++ b/matrix-authentication-service/crates/router/src/lib.rs @@ -0,0 +1,44 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +pub(crate) mod endpoints; +pub(crate) mod traits; +mod url_builder; + +pub use self::{endpoints::*, traits::Route, url_builder::UrlBuilder}; + +#[cfg(test)] +mod tests { + use std::borrow::Cow; + + use ulid::Ulid; + use url::Url; + + use super::*; + + #[test] + fn test_relative_urls() { + assert_eq!( + OidcConfiguration.path_and_query(), + Cow::Borrowed("/.well-known/openid-configuration") + ); + assert_eq!(Index.path_and_query(), Cow::Borrowed("/")); + assert_eq!( + Login::and_continue_grant(Ulid::nil()).path_and_query(), + Cow::Borrowed("/login?kind=continue_authorization_grant&id=00000000000000000000000000") + ); + } + + #[test] + fn test_absolute_urls() { + let base = Url::try_from("https://example.com/").unwrap(); + assert_eq!(Index.absolute_url(&base).as_str(), "https://example.com/"); + assert_eq!( + OidcConfiguration.absolute_url(&base).as_str(), + "https://example.com/.well-known/openid-configuration" + ); + } +} diff --git a/matrix-authentication-service/crates/router/src/traits.rs b/matrix-authentication-service/crates/router/src/traits.rs new file mode 100644 index 
00000000..5e7f13bd --- /dev/null +++ b/matrix-authentication-service/crates/router/src/traits.rs @@ -0,0 +1,54 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::borrow::Cow; + +use serde::Serialize; +use url::Url; + +pub trait Route { + type Query: Serialize; + fn route() -> &'static str; + fn query(&self) -> Option<&Self::Query> { + None + } + + fn path(&self) -> Cow<'static, str> { + Cow::Borrowed(Self::route()) + } + + fn path_and_query(&self) -> Cow<'static, str> { + let path = self.path(); + if let Some(query) = self.query() { + let query = serde_urlencoded::to_string(query).unwrap(); + + if query.is_empty() { + path + } else { + format!("{path}?{query}").into() + } + } else { + path + } + } + + fn absolute_url(&self, base: &Url) -> Url { + let relative = self.path_and_query(); + let relative = relative.trim_start_matches('/'); + base.join(relative).unwrap() + } +} + +pub trait SimpleRoute { + const PATH: &'static str; +} + +impl Route for T { + type Query = (); + fn route() -> &'static str { + Self::PATH + } +} diff --git a/matrix-authentication-service/crates/router/src/url_builder.rs b/matrix-authentication-service/crates/router/src/url_builder.rs new file mode 100644 index 00000000..f216fb34 --- /dev/null +++ b/matrix-authentication-service/crates/router/src/url_builder.rs @@ -0,0 +1,305 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Utility to build URLs + +use ulid::Ulid; +use url::Url; + +use crate::traits::Route; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct UrlBuilder { + http_base: Url, + prefix: String, + assets_base: String, + issuer: Url, +} + +impl UrlBuilder { + /// Create an absolute URL for a route + #[must_use] + pub fn absolute_url_for(&self, destination: &U) -> Url + where + U: Route, + { + destination.absolute_url(&self.http_base) + } + + /// Create a relative URL for a route, prefixed with the base URL + #[must_use] + pub fn relative_url_for(&self, destination: &U) -> String + where + U: Route, + { + format!( + "{prefix}{destination}", + prefix = self.prefix, + destination = destination.path_and_query() + ) + } + + /// The prefix added to all relative URLs + #[must_use] + pub fn prefix(&self) -> Option<&str> { + if self.prefix.is_empty() { + None + } else { + Some(&self.prefix) + } + } + + /// Create a (relative) redirect response to a route + pub fn redirect(&self, destination: &U) -> axum::response::Redirect + where + U: Route, + { + let uri = self.relative_url_for(destination); + axum::response::Redirect::to(&uri) + } + + /// Create an absolute redirect response to a route + pub fn absolute_redirect(&self, destination: &U) -> axum::response::Redirect + where + U: Route, + { + let uri = self.absolute_url_for(destination); + axum::response::Redirect::to(uri.as_str()) + } + + /// Create a new [`UrlBuilder`] from a base URL + /// + /// # Panics + /// + /// Panics if the base URL contains a fragment, a query, credentials or + /// isn't HTTP/HTTPS; + #[must_use] + pub fn new(base: Url, issuer: Option, assets_base: Option) -> Self { + assert!( + base.scheme() == "http" || base.scheme() == "https", + "base URL must be HTTP/HTTPS" + ); + assert_eq!(base.query(), None, "base URL must not contain a query"); + assert_eq!( + base.fragment(), + None, + "base URL must not contain a fragment" + ); + assert_eq!(base.username(), "", "base URL must not contain credentials"); + 
assert_eq!( + base.password(), + None, + "base URL must not contain credentials" + ); + + let issuer = issuer.unwrap_or_else(|| base.clone()); + let prefix = base.path().trim_end_matches('/').to_owned(); + let assets_base = assets_base.unwrap_or_else(|| format!("{prefix}/assets/")); + Self { + http_base: base, + prefix, + assets_base, + issuer, + } + } + + /// Site public hostname + /// + /// # Panics + /// + /// Panics if the base URL does not have a host + #[must_use] + pub fn public_hostname(&self) -> &str { + self.http_base + .host_str() + .expect("base URL must have a host") + } + + /// HTTP base + #[must_use] + pub fn http_base(&self) -> Url { + self.http_base.clone() + } + + /// OIDC issuer + #[must_use] + pub fn oidc_issuer(&self) -> Url { + self.issuer.clone() + } + + /// OIDC discovery document URL + #[must_use] + pub fn oidc_discovery(&self) -> Url { + crate::endpoints::OidcConfiguration.absolute_url(&self.issuer) + } + + /// OAuth 2.0 authorization endpoint + #[must_use] + pub fn oauth_authorization_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2AuthorizationEndpoint) + } + + /// OAuth 2.0 token endpoint + #[must_use] + pub fn oauth_token_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2TokenEndpoint) + } + + /// OAuth 2.0 introspection endpoint + #[must_use] + pub fn oauth_introspection_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2Introspection) + } + + /// OAuth 2.0 revocation endpoint + #[must_use] + pub fn oauth_revocation_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2Revocation) + } + + /// OAuth 2.0 client registration endpoint + #[must_use] + pub fn oauth_registration_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2RegistrationEndpoint) + } + + /// OAuth 2.0 device authorization endpoint + #[must_use] + pub fn oauth_device_authorization_endpoint(&self) -> Url { + 
self.absolute_url_for(&crate::endpoints::OAuth2DeviceAuthorizationEndpoint) + } + + /// OAuth 2.0 device code link + #[must_use] + pub fn device_code_link(&self) -> Url { + self.absolute_url_for(&crate::endpoints::DeviceCodeLink::default()) + } + + /// OAuth 2.0 device code link full URL + #[must_use] + pub fn device_code_link_full(&self, code: String) -> Url { + self.absolute_url_for(&crate::endpoints::DeviceCodeLink::with_code(code)) + } + + // OIDC userinfo endpoint + #[must_use] + pub fn oidc_userinfo_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OidcUserinfo) + } + + /// JWKS URI + #[must_use] + pub fn jwks_uri(&self) -> Url { + self.absolute_url_for(&crate::endpoints::OAuth2Keys) + } + + /// Static asset + #[must_use] + pub fn static_asset(&self, path: String) -> Url { + self.absolute_url_for(&crate::endpoints::StaticAsset::new(path)) + } + + /// Static asset base + #[must_use] + pub fn assets_base(&self) -> &str { + &self.assets_base + } + + /// GraphQL endpoint + #[must_use] + pub fn graphql_endpoint(&self) -> Url { + self.absolute_url_for(&crate::endpoints::GraphQL) + } + + /// Upstream redirect URI + #[must_use] + pub fn upstream_oauth_callback(&self, id: Ulid) -> Url { + self.absolute_url_for(&crate::endpoints::UpstreamOAuth2Callback::new(id)) + } + + /// Upstream authorize URI + #[must_use] + pub fn upstream_oauth_authorize(&self, id: Ulid) -> Url { + self.absolute_url_for(&crate::endpoints::UpstreamOAuth2Authorize::new(id)) + } + + /// Account management URI + #[must_use] + pub fn account_management_uri(&self) -> Url { + self.absolute_url_for(&crate::endpoints::Account::default()) + } + + /// Account recovery link + #[must_use] + pub fn account_recovery_link(&self, ticket: String) -> Url { + self.absolute_url_for(&crate::endpoints::AccountRecoveryFinish::new(ticket)) + } +} + +#[cfg(test)] +mod tests { + #[test] + #[should_panic(expected = "base URL must be HTTP/HTTPS")] + fn test_invalid_base_url_scheme() { + let _ = 
super::UrlBuilder::new(url::Url::parse("file:///tmp/").unwrap(), None, None); + } + + #[test] + #[should_panic(expected = "base URL must not contain a query")] + fn test_invalid_base_url_query() { + let _ = super::UrlBuilder::new( + url::Url::parse("https://example.com/?foo=bar").unwrap(), + None, + None, + ); + } + + #[test] + #[should_panic(expected = "base URL must not contain a fragment")] + fn test_invalid_base_url_fragment() { + let _ = super::UrlBuilder::new( + url::Url::parse("https://example.com/#foo").unwrap(), + None, + None, + ); + } + + #[test] + #[should_panic(expected = "base URL must not contain credentials")] + fn test_invalid_base_url_credentials() { + let _ = super::UrlBuilder::new( + url::Url::parse("https://foo@example.com/").unwrap(), + None, + None, + ); + } + + #[test] + fn test_url_prefix() { + let builder = super::UrlBuilder::new( + url::Url::parse("https://example.com/foo/").unwrap(), + None, + None, + ); + assert_eq!(builder.prefix, "/foo"); + + let builder = + super::UrlBuilder::new(url::Url::parse("https://example.com/").unwrap(), None, None); + assert_eq!(builder.prefix, ""); + } + + #[test] + fn test_absolute_uri_prefix() { + let builder = super::UrlBuilder::new( + url::Url::parse("https://example.com/foo/").unwrap(), + None, + None, + ); + + let uri = builder.absolute_url_for(&crate::endpoints::OAuth2AuthorizationEndpoint); + assert_eq!(uri.as_str(), "https://example.com/foo/authorize"); + } +} diff --git a/matrix-authentication-service/crates/spa/Cargo.toml b/matrix-authentication-service/crates/spa/Cargo.toml new file mode 100644 index 00000000..5287abf1 --- /dev/null +++ b/matrix-authentication-service/crates/spa/Cargo.toml @@ -0,0 +1,22 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-spa" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +camino.workspace = true +serde.workspace = true +thiserror.workspace = true diff --git a/matrix-authentication-service/crates/spa/src/lib.rs b/matrix-authentication-service/crates/spa/src/lib.rs new file mode 100644 index 00000000..7e38f977 --- /dev/null +++ b/matrix-authentication-service/crates/spa/src/lib.rs @@ -0,0 +1,13 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(rustdoc::missing_crate_level_docs)] + +//! A crate to help serve single-page apps built by Vite. + +mod vite; + +pub use self::vite::{FileType, Manifest as ViteManifest}; diff --git a/matrix-authentication-service/crates/spa/src/vite.rs b/matrix-authentication-service/crates/spa/src/vite.rs new file mode 100644 index 00000000..b2d7c0c2 --- /dev/null +++ b/matrix-authentication-service/crates/spa/src/vite.rs @@ -0,0 +1,206 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::collections::{BTreeSet, HashMap}; + +use camino::{Utf8Path, Utf8PathBuf}; +use thiserror::Error; + +#[derive(serde::Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct ManifestEntry { + #[expect(dead_code)] + name: Option, + + #[expect(dead_code)] + names: Option>, + + #[expect(dead_code)] + src: Option, + + file: Utf8PathBuf, + + css: Option>, + + assets: Option>, + + #[expect(dead_code)] + is_entry: Option, + + #[expect(dead_code)] + is_dynamic_entry: Option, + + imports: Option>, + + #[expect(dead_code)] + dynamic_imports: Option>, + + integrity: Option, +} + +#[derive(serde::Deserialize, Debug, Clone)] +pub struct Manifest { + #[serde(flatten)] + inner: HashMap, +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum FileType { + Script, + Stylesheet, + Woff, + Woff2, + Json, + Png, +} + +impl FileType { + fn from_name(name: &Utf8Path) -> Option { + match name.extension() { + Some("css") => Some(Self::Stylesheet), + Some("js") => Some(Self::Script), + Some("woff") => Some(Self::Woff), + Some("woff2") => Some(Self::Woff2), + Some("json") => Some(Self::Json), + Some("png") => Some(Self::Png), + _ => None, + } + } +} + +#[derive(Debug, Error)] +#[error("Invalid Vite manifest")] +pub enum InvalidManifest<'a> { + #[error("Can't find asset for name {name:?}")] + CantFindAssetByName { name: &'a Utf8Path }, + + #[error("Can't find asset for file {file:?}")] + CantFindAssetByFile { file: &'a Utf8Path }, + + #[error("Invalid file type")] + InvalidFileType, +} + +/// Represents an entry which should be preloaded and included +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Asset<'a> { + file_type: FileType, + name: &'a Utf8Path, + integrity: Option<&'a str>, +} + +impl<'a> Asset<'a> { + fn new(entry: &'a ManifestEntry) -> Result> { + let name = &entry.file; + let integrity = entry.integrity.as_deref(); + let file_type = 
FileType::from_name(name).ok_or(InvalidManifest::InvalidFileType)?; + Ok(Self { + file_type, + name, + integrity, + }) + } + + /// Get the source path of this asset, relative to the assets base path + #[must_use] + pub fn src(&self, assets_base: &Utf8Path) -> Utf8PathBuf { + assets_base.join(self.name) + } + + /// Get the file type of this asset + #[must_use] + pub fn file_type(&self) -> FileType { + self.file_type + } + + /// Get the integrity HTML tag attribute, with a leading space, if any + #[must_use] + pub fn integrity_attr(&self) -> String { + self.integrity + .map(|i| format!(r#" integrity="{i}""#)) + .unwrap_or_default() + } +} + +impl Manifest { + /// Find all assets which should be loaded for a given entrypoint + /// + /// Returns the main asset and all the assets it imports + /// + /// # Errors + /// + /// Returns an error if the entrypoint is invalid for this manifest + pub fn find_assets<'a>( + &'a self, + entrypoint: &'a Utf8Path, + ) -> Result<(Asset<'a>, BTreeSet>), InvalidManifest<'a>> { + let entry = self.lookup_by_name(entrypoint)?; + let mut entries = BTreeSet::new(); + let main_asset = self.find_imported_chunks(entry, &mut entries)?; + + // Remove the main asset from the set of imported entries. 
We had it mainly to + // deduplicate the list of assets, but we don't want to include it twice + entries.remove(&main_asset); + + Ok((main_asset, entries)) + } + + /// Lookup an entry in the manifest by its original name + fn lookup_by_name<'a>( + &self, + name: &'a Utf8Path, + ) -> Result<&ManifestEntry, InvalidManifest<'a>> { + self.inner + .get(name) + .ok_or(InvalidManifest::CantFindAssetByName { name }) + } + + /// Lookup an entry in the manifest by its output name + fn lookup_by_file<'a>( + &self, + file: &'a Utf8Path, + ) -> Result<&ManifestEntry, InvalidManifest<'a>> { + self.inner + .values() + .find(|e| e.file == file) + .ok_or(InvalidManifest::CantFindAssetByFile { file }) + } + + fn find_imported_chunks<'a>( + &'a self, + current_entry: &'a ManifestEntry, + entries: &mut BTreeSet>, + ) -> Result, InvalidManifest<'a>> { + let asset = Asset::new(current_entry)?; + let inserted = entries.insert(asset); + + // If we inserted the entry, we need to find its dependencies + if inserted { + if let Some(css) = ¤t_entry.css { + for file in css { + let entry = self.lookup_by_file(file)?; + self.find_imported_chunks(entry, entries)?; + } + } + + if let Some(assets) = ¤t_entry.assets { + for file in assets { + let entry = self.lookup_by_file(file)?; + self.find_imported_chunks(entry, entries)?; + } + } + + if let Some(imports) = ¤t_entry.imports { + for import in imports { + let entry = self.lookup_by_name(import)?; + self.find_imported_chunks(entry, entries)?; + } + } + } + + Ok(asset) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-015f7ad7c8d5403ce4dfb71d598fd9af472689d5aef7c1c4b1c594ca57c02237.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-015f7ad7c8d5403ce4dfb71d598fd9af472689d5aef7c1c4b1c594ca57c02237.json new file mode 100644 index 00000000..2daa69ab --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-015f7ad7c8d5403ce4dfb71d598fd9af472689d5aef7c1c4b1c594ca57c02237.json @@ -0,0 
+1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_authorization_grants\n SET fulfilled_at = $2\n , oauth2_session_id = $3\n WHERE oauth2_authorization_grant_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "015f7ad7c8d5403ce4dfb71d598fd9af472689d5aef7c1c4b1c594ca57c02237" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-037fae6964130343453ef607791c4c3deaa01b5aaa091d3a3487caf3e2634daf.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-037fae6964130343453ef607791c4c3deaa01b5aaa091d3a3487caf3e2634daf.json new file mode 100644 index 00000000..27b346a8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-037fae6964130343453ef607791c4c3deaa01b5aaa091d3a3487caf3e2634daf.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_terms (user_terms_id, user_id, terms_url, created_at)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (user_id, terms_url) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "037fae6964130343453ef607791c4c3deaa01b5aaa091d3a3487caf3e2634daf" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-03eee34f05df9c79f8ca5bfb1af339b3fcea95ba59395106318366a6ef432d85.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-03eee34f05df9c79f8ca5bfb1af339b3fcea95ba59395106318366a6ef432d85.json new file mode 100644 index 00000000..12c48424 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-03eee34f05df9c79f8ca5bfb1af339b3fcea95ba59395106318366a6ef432d85.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_sessions\n SET last_active_at = GREATEST(t.last_active_at, user_sessions.last_active_at)\n , last_active_ip = COALESCE(t.last_active_ip, 
user_sessions.last_active_ip)\n FROM (\n SELECT *\n FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[])\n AS t(user_session_id, last_active_at, last_active_ip)\n ) AS t\n WHERE user_sessions.user_session_id = t.user_session_id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "TimestamptzArray", + "InetArray" + ] + }, + "nullable": [] + }, + "hash": "03eee34f05df9c79f8ca5bfb1af339b3fcea95ba59395106318366a6ef432d85" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-047990a99794b565c2cad396946299db5b617f52f6c24bcca0a24c0c185c4478.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-047990a99794b565c2cad396946299db5b617f52f6c24bcca0a24c0c185c4478.json new file mode 100644 index 00000000..8eb80f53 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-047990a99794b565c2cad396946299db5b617f52f6c24bcca0a24c0c185c4478.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_sessions\n SET last_active_at = GREATEST(t.last_active_at, oauth2_sessions.last_active_at)\n , last_active_ip = COALESCE(t.last_active_ip, oauth2_sessions.last_active_ip)\n FROM (\n SELECT *\n FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[])\n AS t(oauth2_session_id, last_active_at, last_active_ip)\n ) AS t\n WHERE oauth2_sessions.oauth2_session_id = t.oauth2_session_id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "TimestamptzArray", + "InetArray" + ] + }, + "nullable": [] + }, + "hash": "047990a99794b565c2cad396946299db5b617f52f6c24bcca0a24c0c185c4478" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-048eec775f4af3ffd805e830e8286c6a5745e523b76e1083d6bfced0035c2f76.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-048eec775f4af3ffd805e830e8286c6a5745e523b76e1083d6bfced0035c2f76.json new file mode 100644 index 00000000..707eead7 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-048eec775f4af3ffd805e830e8286c6a5745e523b76e1083d6bfced0035c2f76.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_providers\n SET disabled_at = $2\n WHERE upstream_oauth_provider_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "048eec775f4af3ffd805e830e8286c6a5745e523b76e1083d6bfced0035c2f76" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-05b4dd39521eaf4e8e3c21654df67c00c8781f54054a84b3f3005b65cbc2a14a.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-05b4dd39521eaf4e8e3c21654df67c00c8781f54054a84b3f3005b65cbc2a14a.json new file mode 100644 index 00000000..b1164616 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-05b4dd39521eaf4e8e3c21654df67c00c8781f54054a84b3f3005b65cbc2a14a.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_email_authentications\n ( user_email_authentication_id\n , user_session_id\n , email\n , created_at\n )\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "05b4dd39521eaf4e8e3c21654df67c00c8781f54054a84b3f3005b65cbc2a14a" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-06d67595eeef23d5f2773632e0956577d98074e244a35c0d3be24bc18d9d0daa.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-06d67595eeef23d5f2773632e0956577d98074e244a35c0d3be24bc18d9d0daa.json new file mode 100644 index 00000000..55509569 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-06d67595eeef23d5f2773632e0956577d98074e244a35c0d3be24bc18d9d0daa.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE personal_sessions\n SET revoked_at = $2\n WHERE 
personal_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "06d67595eeef23d5f2773632e0956577d98074e244a35c0d3be24bc18d9d0daa" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-07cd2da428f0984513b4ce58e526c35c9c236ea8beb6696e5740fa45655e59f3.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-07cd2da428f0984513b4ce58e526c35c9c236ea8beb6696e5740fa45655e59f3.json new file mode 100644 index 00000000..e5ffe95e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-07cd2da428f0984513b4ce58e526c35c9c236ea8beb6696e5740fa45655e59f3.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_jobs\n SET next_attempt_id = $1\n WHERE queue_job_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "07cd2da428f0984513b4ce58e526c35c9c236ea8beb6696e5740fa45655e59f3" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-093d42238578771b4183b48c1680ba438b6b18306dfe1454fa4124c0207b3deb.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-093d42238578771b4183b48c1680ba438b6b18306dfe1454fa4124c0207b3deb.json new file mode 100644 index 00000000..3af6ac9c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-093d42238578771b4183b48c1680ba438b6b18306dfe1454fa4124c0207b3deb.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT rts_to_del.oauth2_refresh_token_id\n FROM oauth2_refresh_tokens rts_to_del\n LEFT JOIN oauth2_refresh_tokens next_rts\n ON rts_to_del.next_oauth2_refresh_token_id = next_rts.oauth2_refresh_token_id\n WHERE rts_to_del.consumed_at IS NOT NULL\n AND (rts_to_del.next_oauth2_refresh_token_id IS NULL OR next_rts.consumed_at IS NOT NULL)\n AND ($1::timestamptz IS NULL OR rts_to_del.consumed_at >= 
$1::timestamptz)\n AND rts_to_del.consumed_at < $2::timestamptz\n ORDER BY rts_to_del.consumed_at ASC\n LIMIT $3\n ),\n\n deleted AS (\n DELETE FROM oauth2_refresh_tokens\n USING to_delete\n WHERE oauth2_refresh_tokens.oauth2_refresh_token_id = to_delete.oauth2_refresh_token_id\n RETURNING oauth2_refresh_tokens.consumed_at\n )\n\n SELECT\n COUNT(*) as \"count!\",\n MAX(consumed_at) as last_consumed_at\n FROM deleted\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_consumed_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "093d42238578771b4183b48c1680ba438b6b18306dfe1454fa4124c0207b3deb" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e1bce56e15751d82a622d532b279bfc50e22cb12ddf7495c7b0fedca61f9421.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e1bce56e15751d82a622d532b279bfc50e22cb12ddf7495c7b0fedca61f9421.json new file mode 100644 index 00000000..afd0835b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e1bce56e15751d82a622d532b279bfc50e22cb12ddf7495c7b0fedca61f9421.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_email_authentications\n ( user_email_authentication_id\n , user_registration_id\n , email\n , created_at\n )\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "0e1bce56e15751d82a622d532b279bfc50e22cb12ddf7495c7b0fedca61f9421" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e45995714e60b71e0f0158500a63aa46225245a04d1c7bc24b5275c44a6d58d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e45995714e60b71e0f0158500a63aa46225245a04d1c7bc24b5275c44a6d58d.json 
new file mode 100644 index 00000000..5bba6548 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0e45995714e60b71e0f0158500a63aa46225245a04d1c7bc24b5275c44a6d58d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE personal_access_tokens\n SET revoked_at = $2\n WHERE personal_access_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "0e45995714e60b71e0f0158500a63aa46225245a04d1c7bc24b5275c44a6d58d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json new file mode 100644 index 00000000..1eb87fd3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters,\n forward_login_hint,\n ui_order,\n on_backchannel_logout,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n $11, $12, $13, $14, $15, $16, $17, $18, $19, $20,\n $21, $22, $23, $24, $25)\n ON CONFLICT (upstream_oauth_provider_id)\n DO UPDATE\n SET\n issuer = EXCLUDED.issuer,\n human_name = EXCLUDED.human_name,\n brand_name = EXCLUDED.brand_name,\n scope = EXCLUDED.scope,\n 
token_endpoint_auth_method = EXCLUDED.token_endpoint_auth_method,\n token_endpoint_signing_alg = EXCLUDED.token_endpoint_signing_alg,\n id_token_signed_response_alg = EXCLUDED.id_token_signed_response_alg,\n fetch_userinfo = EXCLUDED.fetch_userinfo,\n userinfo_signed_response_alg = EXCLUDED.userinfo_signed_response_alg,\n disabled_at = NULL,\n client_id = EXCLUDED.client_id,\n encrypted_client_secret = EXCLUDED.encrypted_client_secret,\n claims_imports = EXCLUDED.claims_imports,\n authorization_endpoint_override = EXCLUDED.authorization_endpoint_override,\n token_endpoint_override = EXCLUDED.token_endpoint_override,\n userinfo_endpoint_override = EXCLUDED.userinfo_endpoint_override,\n jwks_uri_override = EXCLUDED.jwks_uri_override,\n discovery_mode = EXCLUDED.discovery_mode,\n pkce_mode = EXCLUDED.pkce_mode,\n response_mode = EXCLUDED.response_mode,\n additional_parameters = EXCLUDED.additional_parameters,\n forward_login_hint = EXCLUDED.forward_login_hint,\n ui_order = EXCLUDED.ui_order,\n on_backchannel_logout = EXCLUDED.on_backchannel_logout\n RETURNING created_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Bool", + "Text", + "Text", + "Text", + "Jsonb", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Jsonb", + "Bool", + "Int4", + "Text", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-109f0c859e123966462f1001aef550e4e12d1778474aba72762d9aa093d21ee2.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-109f0c859e123966462f1001aef550e4e12d1778474aba72762d9aa093d21ee2.json new file mode 100644 index 00000000..83400921 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-109f0c859e123966462f1001aef550e4e12d1778474aba72762d9aa093d21ee2.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO personal_sessions\n ( personal_session_id\n , owner_user_id\n , owner_oauth2_client_id\n , actor_user_id\n , human_name\n , scope_list\n , created_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Uuid", + "Uuid", + "Text", + "TextArray", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "109f0c859e123966462f1001aef550e4e12d1778474aba72762d9aa093d21ee2" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-12c4577701416a9dc23708c46700f3f086e4e62c6de9d6864a6a11a2470ebe62.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-12c4577701416a9dc23708c46700f3f086e4e62c6de9d6864a6a11a2470ebe62.json new file mode 100644 index 00000000..dce1983f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-12c4577701416a9dc23708c46700f3f086e4e62c6de9d6864a6a11a2470ebe62.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_workers (queue_worker_id, registered_at, last_seen_at)\n VALUES ($1, $2, $2)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "12c4577701416a9dc23708c46700f3f086e4e62c6de9d6864a6a11a2470ebe62" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-1764715e59f879f6b917ca30f8e3c1de5910c7a46e7fe52d1fb3bfd5561ac320.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1764715e59f879f6b917ca30f8e3c1de5910c7a46e7fe52d1fb3bfd5561ac320.json new file mode 100644 index 00000000..3a1232f2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1764715e59f879f6b917ca30f8e3c1de5910c7a46e7fe52d1fb3bfd5561ac320.json @@ -0,0 +1,15 @@ +{ + "db_name": 
"PostgreSQL", + "query": "\n UPDATE user_recovery_sessions\n SET consumed_at = $1\n WHERE user_recovery_session_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "1764715e59f879f6b917ca30f8e3c1de5910c7a46e7fe52d1fb3bfd5561ac320" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-188a4aeef5a8b4bf3230c7176ded64d52804848df378dc74f8f54ec4404e094e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-188a4aeef5a8b4bf3230c7176ded64d52804848df378dc74f8f54ec4404e094e.json new file mode 100644 index 00000000..e6c0970c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-188a4aeef5a8b4bf3230c7176ded64d52804848df378dc74f8f54ec4404e094e.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET terms_url = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "188a4aeef5a8b4bf3230c7176ded64d52804848df378dc74f8f54ec4404e094e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-1919d402fd6f148d14417f633be3353004f458c85f7b4f361802f86651900fbc.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1919d402fd6f148d14417f633be3353004f458c85f7b4f361802f86651900fbc.json new file mode 100644 index 00000000..326ab111 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1919d402fd6f148d14417f633be3353004f458c85f7b4f361802f86651900fbc.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_sessions\n SET user_agent = $2\n WHERE oauth2_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "1919d402fd6f148d14417f633be3353004f458c85f7b4f361802f86651900fbc" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-1b547552eed4128f2227c681ff2d45586cdb0c20b98393f89036fbf0f1d2dee2.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1b547552eed4128f2227c681ff2d45586cdb0c20b98393f89036fbf0f1d2dee2.json new file mode 100644 index 00000000..33d65ac3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1b547552eed4128f2227c681ff2d45586cdb0c20b98393f89036fbf0f1d2dee2.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_sessions\n ( oauth2_session_id\n , user_id\n , user_session_id\n , oauth2_client_id\n , scope_list\n , created_at\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Uuid", + "Uuid", + "TextArray", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "1b547552eed4128f2227c681ff2d45586cdb0c20b98393f89036fbf0f1d2dee2" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-1dbc50cdab36da307c569891ab7b1ab4aaf128fed6be67ca0f139d697614c63b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1dbc50cdab36da307c569891ab7b1ab4aaf128fed6be67ca0f139d697614c63b.json new file mode 100644 index 00000000..f4e08b5c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1dbc50cdab36da307c569891ab7b1ab4aaf128fed6be67ca0f139d697614c63b.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users\n SET can_request_admin = $2\n WHERE user_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "1dbc50cdab36da307c569891ab7b1ab4aaf128fed6be67ca0f139d697614c63b" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-1eb829460407fca22b717b88a1a0a9b7b920d807a4b6c235e1bee524cd73b266.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1eb829460407fca22b717b88a1a0a9b7b920d807a4b6c235e1bee524cd73b266.json new file mode 100644 index 00000000..ce332c08 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-1eb829460407fca22b717b88a1a0a9b7b920d807a4b6c235e1bee524cd73b266.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM upstream_oauth_links\n WHERE upstream_oauth_provider_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "1eb829460407fca22b717b88a1a0a9b7b920d807a4b6c235e1bee524cd73b266" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-21b9e39ffd89de288305765c339a991d2471667cf2981770447cde6fd025fbb7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-21b9e39ffd89de288305765c339a991d2471667cf2981770447cde6fd025fbb7.json new file mode 100644 index 00000000..3b3f65b2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-21b9e39ffd89de288305765c339a991d2471667cf2981770447cde6fd025fbb7.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registration_tokens\n SET expires_at = $2\n WHERE user_registration_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "21b9e39ffd89de288305765c339a991d2471667cf2981770447cde6fd025fbb7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-22896e8f2a002f307089c3e0f9ee561e6521c45ce07d3a42411984c9a6b75fdc.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-22896e8f2a002f307089c3e0f9ee561e6521c45ce07d3a42411984c9a6b75fdc.json new file mode 100644 index 00000000..8d157756 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-22896e8f2a002f307089c3e0f9ee561e6521c45ce07d3a42411984c9a6b75fdc.json @@ -0,0 +1,14 @@ +{ + "db_name": 
"PostgreSQL", + "query": "\n UPDATE users\n SET locked_at = NULL\n WHERE user_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "22896e8f2a002f307089c3e0f9ee561e6521c45ce07d3a42411984c9a6b75fdc" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-23d5fcd8bf611dc7279bef0d66ce05461c3c1f43f966fee3a80ae42540783f08.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-23d5fcd8bf611dc7279bef0d66ce05461c3c1f43f966fee3a80ae42540783f08.json new file mode 100644 index 00000000..28391d84 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-23d5fcd8bf611dc7279bef0d66ce05461c3c1f43f966fee3a80ae42540783f08.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_id\n , username\n , created_at\n , locked_at\n , deactivated_at\n , can_request_admin\n , is_guest\n FROM users\n WHERE LOWER(username) = LOWER($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "locked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "deactivated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "can_request_admin", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "is_guest", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "23d5fcd8bf611dc7279bef0d66ce05461c3c1f43f966fee3a80ae42540783f08" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-245cab1cf7d9cf4e94cdec91ecb4dc8e678278121efbe1f66bcdc24144d684d0.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-245cab1cf7d9cf4e94cdec91ecb4dc8e678278121efbe1f66bcdc24144d684d0.json new file mode 100644 index 00000000..b6635baa --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-245cab1cf7d9cf4e94cdec91ecb4dc8e678278121efbe1f66bcdc24144d684d0.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_jobs\n (queue_job_id, queue_name, payload, metadata, created_at, scheduled_at, schedule_name, status)\n VALUES ($1, $2, $3, $4, $5, $6, $7, 'scheduled')\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Jsonb", + "Jsonb", + "Timestamptz", + "Timestamptz", + "Text" + ] + }, + "nullable": [] + }, + "hash": "245cab1cf7d9cf4e94cdec91ecb4dc8e678278121efbe1f66bcdc24144d684d0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2564bf6366eb59268c41fb25bb40d0e4e9e1fd1f9ea53b7a359c9025d7304223.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2564bf6366eb59268c41fb25bb40d0e4e9e1fd1f9ea53b7a359c9025d7304223.json new file mode 100644 index 00000000..64a8786b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2564bf6366eb59268c41fb25bb40d0e4e9e1fd1f9ea53b7a359c9025d7304223.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_access_tokens\n SET revoked_at = $2\n WHERE oauth2_access_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "2564bf6366eb59268c41fb25bb40d0e4e9e1fd1f9ea53b7a359c9025d7304223" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-29148548d592046f7d711676911e3847e376e443ccd841f76b17a81f53fafc3a.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-29148548d592046f7d711676911e3847e376e443ccd841f76b17a81f53fafc3a.json new file mode 100644 index 00000000..6c84cb92 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-29148548d592046f7d711676911e3847e376e443ccd841f76b17a81f53fafc3a.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sessions\n SET user_agent = $2\n WHERE compat_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "29148548d592046f7d711676911e3847e376e443ccd841f76b17a81f53fafc3a" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a0d8d70d21afa9a2c9c1c432853361bb85911c48f7db6c3873b0f5abf35940b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a0d8d70d21afa9a2c9c1c432853361bb85911c48f7db6c3873b0f5abf35940b.json new file mode 100644 index 00000000..02ac95d8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a0d8d70d21afa9a2c9c1c432853361bb85911c48f7db6c3873b0f5abf35940b.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM oauth2_authorization_grants\n WHERE oauth2_client_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "2a0d8d70d21afa9a2c9c1c432853361bb85911c48f7db6c3873b0f5abf35940b" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a61003da3655158e6a261d91fdff670f1b4ba3c56605c53e2b905d7ec38c8be.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a61003da3655158e6a261d91fdff670f1b4ba3c56605c53e2b905d7ec38c8be.json new file mode 100644 index 00000000..21a67060 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2a61003da3655158e6a261d91fdff670f1b4ba3c56605c53e2b905d7ec38c8be.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM personal_access_tokens\n WHERE personal_session_id IN (\n SELECT personal_session_id\n FROM personal_sessions\n WHERE owner_oauth2_client_id = $1\n )\n ", + "describe": { + "columns": [], + 
"parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "2a61003da3655158e6a261d91fdff670f1b4ba3c56605c53e2b905d7ec38c8be" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2d249684e0e4db0e3bc189f821521657559d9b77fd931f972ce4d9f03a57f97a.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2d249684e0e4db0e3bc189f821521657559d9b77fd931f972ce4d9f03a57f97a.json new file mode 100644 index 00000000..f00999bf --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2d249684e0e4db0e3bc189f821521657559d9b77fd931f972ce4d9f03a57f97a.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT oauth2_access_token_id\n FROM oauth2_access_tokens\n WHERE expires_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR expires_at >= $1::timestamptz)\n AND expires_at < $2::timestamptz\n ORDER BY expires_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n\n deleted AS (\n DELETE FROM oauth2_access_tokens\n USING to_delete\n WHERE oauth2_access_tokens.oauth2_access_token_id = to_delete.oauth2_access_token_id\n RETURNING oauth2_access_tokens.expires_at\n )\n\n SELECT\n COUNT(*) as \"count!\",\n MAX(expires_at) as last_expires_at\n FROM deleted\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_expires_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "2d249684e0e4db0e3bc189f821521657559d9b77fd931f972ce4d9f03a57f97a" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2ee26886c56f04cd53d4c0968f5cf0963f92b6d15e6af0e69378a6447dee677c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2ee26886c56f04cd53d4c0968f5cf0963f92b6d15e6af0e69378a6447dee677c.json new file mode 100644 index 00000000..628a0c9c --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2ee26886c56f04cd53d4c0968f5cf0963f92b6d15e6af0e69378a6447dee677c.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM oauth2_access_tokens\n WHERE oauth2_session_id IN (\n SELECT oauth2_session_id\n FROM oauth2_sessions\n WHERE oauth2_client_id = $1\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "2ee26886c56f04cd53d4c0968f5cf0963f92b6d15e6af0e69378a6447dee677c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f66991d7b9ba58f011d9aef0eb6a38f3b244c2f46444c0ab345de7feff54aba.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f66991d7b9ba58f011d9aef0eb6a38f3b244c2f46444c0ab345de7feff54aba.json new file mode 100644 index 00000000..7fb8be86 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f66991d7b9ba58f011d9aef0eb6a38f3b244c2f46444c0ab345de7feff54aba.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT current_database() as \"current_database!\"", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "current_database!", + "type_info": "Name" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "2f66991d7b9ba58f011d9aef0eb6a38f3b244c2f46444c0ab345de7feff54aba" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f7aba76cd7df75d6a9a6d91d5ddebaedf37437f3bd4f796f5581fab997587d7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f7aba76cd7df75d6a9a6d91d5ddebaedf37437f3bd4f796f5581fab997587d7.json new file mode 100644 index 00000000..6b66e72e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f7aba76cd7df75d6a9a6d91d5ddebaedf37437f3bd4f796f5581fab997587d7.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users\n SET deactivated_at = $2\n WHERE user_id = $1\n AND deactivated_at IS NULL\n 
", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "2f7aba76cd7df75d6a9a6d91d5ddebaedf37437f3bd4f796f5581fab997587d7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f8d402b7217aef47a5c45d4f7cfddbaeedcbbc6963ee573409bfc98e57de6ed.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f8d402b7217aef47a5c45d4f7cfddbaeedcbbc6963ee573409bfc98e57de6ed.json new file mode 100644 index 00000000..473db95d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-2f8d402b7217aef47a5c45d4f7cfddbaeedcbbc6963ee573409bfc98e57de6ed.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_email_authentication_codes\n ( user_email_authentication_code_id\n , user_email_authentication_id\n , code\n , created_at\n , expires_at\n )\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "2f8d402b7217aef47a5c45d4f7cfddbaeedcbbc6963ee573409bfc98e57de6ed" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-31e8bf68ff70a436fd0b6787ac8e2777f9327708b450d048638a162343478cc6.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-31e8bf68ff70a436fd0b6787ac8e2777f9327708b450d048638a162343478cc6.json new file mode 100644 index 00000000..84e37eb5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-31e8bf68ff70a436fd0b6787ac8e2777f9327708b450d048638a162343478cc6.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT oauth2_refresh_token_id\n FROM oauth2_refresh_tokens\n WHERE revoked_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR revoked_at >= $1::timestamptz)\n AND revoked_at < $2::timestamptz\n ORDER BY revoked_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n\n deleted AS (\n DELETE FROM 
oauth2_refresh_tokens\n USING to_delete\n WHERE oauth2_refresh_tokens.oauth2_refresh_token_id = to_delete.oauth2_refresh_token_id\n RETURNING oauth2_refresh_tokens.revoked_at\n )\n\n SELECT\n COUNT(*) as \"count!\",\n MAX(revoked_at) as last_revoked_at\n FROM deleted\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "31e8bf68ff70a436fd0b6787ac8e2777f9327708b450d048638a162343478cc6" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json new file mode 100644 index 00000000..3f837630 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json @@ -0,0 +1,36 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n forward_login_hint,\n on_backchannel_logout,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11,\n $12, $13, $14, $15, $16, $17, $18, $19, $20,\n $21, $22, $23)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", 
+ "Bool", + "Text", + "Text", + "Text", + "Jsonb", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Bool", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-38d0608b7d8ba30927f939491c1d43cfd962c729298ad07ee1ade2f2880c0eb3.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-38d0608b7d8ba30927f939491c1d43cfd962c729298ad07ee1ade2f2880c0eb3.json new file mode 100644 index 00000000..d4dcbda4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-38d0608b7d8ba30927f939491c1d43cfd962c729298ad07ee1ade2f2880c0eb3.json @@ -0,0 +1,142 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_client_id\n , metadata_digest\n , encrypted_client_secret\n , application_type\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , client_name\n , logo_uri\n , client_uri\n , policy_uri\n , tos_uri\n , jwks_uri\n , jwks\n , id_token_signed_response_alg\n , userinfo_signed_response_alg\n , token_endpoint_auth_method\n , token_endpoint_auth_signing_alg\n , initiate_login_uri\n FROM oauth2_clients c\n\n WHERE oauth2_client_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "metadata_digest", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "application_type", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "redirect_uris", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "grant_type_authorization_code", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "grant_type_refresh_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": 
"grant_type_client_credentials", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "grant_type_device_code", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "client_name", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "logo_uri", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "client_uri", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "policy_uri", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "tos_uri", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "jwks_uri", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "jwks", + "type_info": "Jsonb" + }, + { + "ordinal": 16, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "token_endpoint_auth_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "initiate_login_uri", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "38d0608b7d8ba30927f939491c1d43cfd962c729298ad07ee1ade2f2880c0eb3" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-38eb6b635d30ca78ff78b926b414cbd866cfc2918ca4b1741b5687f21cfe273b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-38eb6b635d30ca78ff78b926b414cbd866cfc2918ca4b1741b5687f21cfe273b.json new file mode 100644 index 00000000..883d2174 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-38eb6b635d30ca78ff78b926b414cbd866cfc2918ca4b1741b5687f21cfe273b.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_id\n , user_id\n , email\n , created_at\n 
FROM user_emails\n\n WHERE user_email_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "38eb6b635d30ca78ff78b926b414cbd866cfc2918ca4b1741b5687f21cfe273b" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-399e261027fe6c9167511636157ab747a469404533f59ff6fbd56e9eb5ad38e1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-399e261027fe6c9167511636157ab747a469404533f59ff6fbd56e9eb5ad38e1.json new file mode 100644 index 00000000..f0a50a64 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-399e261027fe6c9167511636157ab747a469404533f59ff6fbd56e9eb5ad38e1.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM queue_leader\n WHERE queue_worker_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "399e261027fe6c9167511636157ab747a469404533f59ff6fbd56e9eb5ad38e1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7960a2eb2edd71bc71177fc0fb2e83858c9944893b8f3a0f0131e8a9b7a494.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7960a2eb2edd71bc71177fc0fb2e83858c9944893b8f3a0f0131e8a9b7a494.json new file mode 100644 index 00000000..a45aacc7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7960a2eb2edd71bc71177fc0fb2e83858c9944893b8f3a0f0131e8a9b7a494.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_jobs\n SET status = 'available'\n WHERE\n status = 'scheduled'\n AND scheduled_at <= $1\n ", + "describe": { + 
"columns": [], + "parameters": { + "Left": [ + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "3c7960a2eb2edd71bc71177fc0fb2e83858c9944893b8f3a0f0131e8a9b7a494" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7fc3e386ce51187f6344ad65e1d78a7f026e8311bdc7d5ccc2f39d962e898f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7fc3e386ce51187f6344ad65e1d78a7f026e8311bdc7d5ccc2f39d962e898f.json new file mode 100644 index 00000000..8c2b2f4c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3c7fc3e386ce51187f6344ad65e1d78a7f026e8311bdc7d5ccc2f39d962e898f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registration_tokens\n SET revoked_at = NULL\n WHERE user_registration_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "3c7fc3e386ce51187f6344ad65e1d78a7f026e8311bdc7d5ccc2f39d962e898f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3d66f3121b11ce923b9c60609b510a8ca899640e78cc8f5b03168622928ffe94.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3d66f3121b11ce923b9c60609b510a8ca899640e78cc8f5b03168622928ffe94.json new file mode 100644 index 00000000..3e2db4fc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3d66f3121b11ce923b9c60609b510a8ca899640e78cc8f5b03168622928ffe94.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM user_emails\n WHERE user_email_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "3d66f3121b11ce923b9c60609b510a8ca899640e78cc8f5b03168622928ffe94" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3e6e3aad53b22fc53eb3ee881b29bb249b18ced57d6a4809dffc23972b3e9423.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3e6e3aad53b22fc53eb3ee881b29bb249b18ced57d6a4809dffc23972b3e9423.json new file mode 100644 index 00000000..a930b70e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3e6e3aad53b22fc53eb3ee881b29bb249b18ced57d6a4809dffc23972b3e9423.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_schedules\n SET last_scheduled_at = $1,\n last_scheduled_job_id = $2\n WHERE schedule_name = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "3e6e3aad53b22fc53eb3ee881b29bb249b18ced57d6a4809dffc23972b3e9423" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3ed73cfce8ef6a1108f454e18b1668f64b76975dba07e67d04ed7a52e2e8107f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3ed73cfce8ef6a1108f454e18b1668f64b76975dba07e67d04ed7a52e2e8107f.json new file mode 100644 index 00000000..49d451a1 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3ed73cfce8ef6a1108f454e18b1668f64b76975dba07e67d04ed7a52e2e8107f.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_authorization_sessions SET\n upstream_oauth_link_id = NULL,\n unlinked_at = $2\n WHERE upstream_oauth_link_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "3ed73cfce8ef6a1108f454e18b1668f64b76975dba07e67d04ed7a52e2e8107f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-3f9d76f442c82a1631da931950b83b80c9620e1825ab07ab6c52f3f1a32d2527.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3f9d76f442c82a1631da931950b83b80c9620e1825ab07ab6c52f3f1a32d2527.json new file mode 100644 index 00000000..23108454 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-3f9d76f442c82a1631da931950b83b80c9620e1825ab07ab6c52f3f1a32d2527.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sso_logins\n SET\n user_session_id = $2,\n fulfilled_at = $3\n WHERE\n compat_sso_login_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "3f9d76f442c82a1631da931950b83b80c9620e1825ab07ab6c52f3f1a32d2527" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-432e199b0d47fe299d840c91159726c0a4f89f65b4dc3e33ddad58aabf6b148b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-432e199b0d47fe299d840c91159726c0a4f89f65b4dc3e33ddad58aabf6b148b.json new file mode 100644 index 00000000..44bd300b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-432e199b0d47fe299d840c91159726c0a4f89f65b4dc3e33ddad58aabf6b148b.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_refresh_token_id\n , refresh_token\n , created_at\n , consumed_at\n , compat_session_id\n , compat_access_token_id\n\n FROM compat_refresh_tokens\n\n WHERE refresh_token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_refresh_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "refresh_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "consumed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 5, + "name": "compat_access_token_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false + ] + }, + "hash": "432e199b0d47fe299d840c91159726c0a4f89f65b4dc3e33ddad58aabf6b148b" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-446a8d7bd8532a751810401adfab924dc20785c91770ed43d62df2e590e8da71.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-446a8d7bd8532a751810401adfab924dc20785c91770ed43d62df2e590e8da71.json new file mode 100644 index 00000000..3b8b6793 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-446a8d7bd8532a751810401adfab924dc20785c91770ed43d62df2e590e8da71.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT up.user_password_id\n , up.hashed_password\n , up.version\n , up.upgraded_from_id\n , up.created_at\n FROM user_passwords up\n WHERE up.user_id = $1\n ORDER BY up.created_at DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_password_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "hashed_password", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "version", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "upgraded_from_id", + "type_info": "Uuid" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + false + ] + }, + "hash": "446a8d7bd8532a751810401adfab924dc20785c91770ed43d62df2e590e8da71" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-45d7e962d91fcdcf8284d81d04bc0737c0d20799b497089a566e2ff704d56b67.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-45d7e962d91fcdcf8284d81d04bc0737c0d20799b497089a566e2ff704d56b67.json new file mode 100644 index 00000000..4f6a8d92 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-45d7e962d91fcdcf8284d81d04bc0737c0d20799b497089a566e2ff704d56b67.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT oauth2_device_code_grant_id\n FROM oauth2_device_code_grant\n WHERE ($1::uuid IS NULL OR 
oauth2_device_code_grant_id > $1)\n AND oauth2_device_code_grant_id <= $2\n ORDER BY oauth2_device_code_grant_id\n LIMIT $3\n )\n DELETE FROM oauth2_device_code_grant\n USING to_delete\n WHERE oauth2_device_code_grant.oauth2_device_code_grant_id = to_delete.oauth2_device_code_grant_id\n RETURNING oauth2_device_code_grant.oauth2_device_code_grant_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_device_code_grant_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "45d7e962d91fcdcf8284d81d04bc0737c0d20799b497089a566e2ff704d56b67" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-494ca16f0f00f977a3031924a15318aa7346917e5c8a37bb0f5b2b3067588009.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-494ca16f0f00f977a3031924a15318aa7346917e5c8a37bb0f5b2b3067588009.json new file mode 100644 index 00000000..f7e0bd73 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-494ca16f0f00f977a3031924a15318aa7346917e5c8a37bb0f5b2b3067588009.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT compat_session_id, finished_at\n FROM compat_sessions\n WHERE finished_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR finished_at >= $1)\n AND finished_at < $2\n ORDER BY finished_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n\n -- Delete refresh tokens first because they reference access tokens\n deleted_refresh_tokens AS (\n DELETE FROM compat_refresh_tokens\n USING to_delete\n WHERE compat_refresh_tokens.compat_session_id = to_delete.compat_session_id\n ),\n\n deleted_access_tokens AS (\n DELETE FROM compat_access_tokens\n USING to_delete\n WHERE compat_access_tokens.compat_session_id = to_delete.compat_session_id\n ),\n\n deleted_sso_logins AS (\n DELETE FROM compat_sso_logins\n USING to_delete\n WHERE compat_sso_logins.compat_session_id = 
to_delete.compat_session_id\n ),\n\n deleted_sessions AS (\n DELETE FROM compat_sessions\n USING to_delete\n WHERE compat_sessions.compat_session_id = to_delete.compat_session_id\n RETURNING compat_sessions.finished_at\n )\n\n SELECT\n COUNT(*) as \"count!\",\n MAX(finished_at) as last_finished_at\n FROM deleted_sessions\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_finished_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "494ca16f0f00f977a3031924a15318aa7346917e5c8a37bb0f5b2b3067588009" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4968c60adef69c7215a7efe2021baffb050b2f475ae106155c2e2f210a81191a.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4968c60adef69c7215a7efe2021baffb050b2f475ae106155c2e2f210a81191a.json new file mode 100644 index 00000000..ae85c032 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4968c60adef69c7215a7efe2021baffb050b2f475ae106155c2e2f210a81191a.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET email_authentication_id = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "4968c60adef69c7215a7efe2021baffb050b2f475ae106155c2e2f210a81191a" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c2064fed8fa464ea3d2a1258fb0544dbf1493cad31a21c0cd7ddb57ed12de16.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c2064fed8fa464ea3d2a1258fb0544dbf1493cad31a21c0cd7ddb57ed12de16.json new file mode 100644 index 00000000..4d1f69f1 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c2064fed8fa464ea3d2a1258fb0544dbf1493cad31a21c0cd7ddb57ed12de16.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_session_authentication_id\n , created_at\n , user_password_id\n , upstream_oauth_authorization_session_id\n FROM user_session_authentications\n WHERE user_session_id = $1\n ORDER BY created_at DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_session_authentication_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "user_password_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "upstream_oauth_authorization_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + true + ] + }, + "hash": "4c2064fed8fa464ea3d2a1258fb0544dbf1493cad31a21c0cd7ddb57ed12de16" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c37988dacca5a83c8b64209042d5f1a8ec44ec8ccccad2d7fce9ac855209883.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c37988dacca5a83c8b64209042d5f1a8ec44ec8ccccad2d7fce9ac855209883.json new file mode 100644 index 00000000..1151ca3e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4c37988dacca5a83c8b64209042d5f1a8ec44ec8ccccad2d7fce9ac855209883.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET upstream_oauth_authorization_session_id = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "4c37988dacca5a83c8b64209042d5f1a8ec44ec8ccccad2d7fce9ac855209883" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4d0386ad2fe47f1aded46917abe6141752ba90d36467693a68318573171d57b0.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4d0386ad2fe47f1aded46917abe6141752ba90d36467693a68318573171d57b0.json new file mode 100644 index 00000000..0b5fb620 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4d0386ad2fe47f1aded46917abe6141752ba90d36467693a68318573171d57b0.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_clients\n ( oauth2_client_id\n , metadata_digest\n , encrypted_client_secret\n , application_type\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , client_name\n , logo_uri\n , client_uri\n , policy_uri\n , tos_uri\n , jwks_uri\n , jwks\n , id_token_signed_response_alg\n , userinfo_signed_response_alg\n , token_endpoint_auth_method\n , token_endpoint_auth_signing_alg\n , initiate_login_uri\n , is_static\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13,\n $14, $15, $16, $17, $18, $19, $20, $21, FALSE)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Text", + "TextArray", + "Bool", + "Bool", + "Bool", + "Bool", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Jsonb", + "Text", + "Text", + "Text", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "4d0386ad2fe47f1aded46917abe6141752ba90d36467693a68318573171d57b0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4dad1838536c10ba723adc0fb6da0f24afb3d6a1925a80a1b6d35b9a8258a0ce.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4dad1838536c10ba723adc0fb6da0f24afb3d6a1925a80a1b6d35b9a8258a0ce.json new file mode 100644 index 00000000..6c8cdbe8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4dad1838536c10ba723adc0fb6da0f24afb3d6a1925a80a1b6d35b9a8258a0ce.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_id\n , username\n , 
created_at\n , locked_at\n , deactivated_at\n , can_request_admin\n , is_guest\n FROM users\n WHERE user_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "locked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "deactivated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "can_request_admin", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "is_guest", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "4dad1838536c10ba723adc0fb6da0f24afb3d6a1925a80a1b6d35b9a8258a0ce" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-4e64540bbffe5f4b9c4a6589012cf69eb67adaa4d40fc1910dfcd2640e32ab37.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4e64540bbffe5f4b9c4a6589012cf69eb67adaa4d40fc1910dfcd2640e32ab37.json new file mode 100644 index 00000000..54b80117 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-4e64540bbffe5f4b9c4a6589012cf69eb67adaa4d40fc1910dfcd2640e32ab37.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_refresh_tokens\n SET consumed_at = $2\n WHERE compat_session_id = $1\n AND consumed_at IS NULL\n AND compat_refresh_token_id <> $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "4e64540bbffe5f4b9c4a6589012cf69eb67adaa4d40fc1910dfcd2640e32ab37" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5006c3e60c98c91a0b0fbb3205373e81d9b75e90929af80961f8b5910873a43e.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5006c3e60c98c91a0b0fbb3205373e81d9b75e90929af80961f8b5910873a43e.json new file mode 100644 index 00000000..d13316a5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5006c3e60c98c91a0b0fbb3205373e81d9b75e90929af80961f8b5910873a43e.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM policy_data\n WHERE policy_data_id IN (\n SELECT policy_data_id\n FROM policy_data\n ORDER BY policy_data_id DESC\n OFFSET $1\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "5006c3e60c98c91a0b0fbb3205373e81d9b75e90929af80961f8b5910873a43e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5133f9c5ba06201433be4ec784034d222975d084d0a9ebe7f1b6b865ab2e09ef.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5133f9c5ba06201433be4ec784034d222975d084d0a9ebe7f1b6b865ab2e09ef.json new file mode 100644 index 00000000..22739847 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5133f9c5ba06201433be4ec784034d222975d084d0a9ebe7f1b6b865ab2e09ef.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registration_tokens\n SET times_used = times_used + 1,\n last_used_at = $2\n WHERE user_registration_token_id = $1 AND revoked_at IS NULL\n RETURNING times_used\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "times_used", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "5133f9c5ba06201433be4ec784034d222975d084d0a9ebe7f1b6b865ab2e09ef" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-535225206622b9190ccf42f7d66268818dc84c37b168ab45e582e0a727796a06.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-535225206622b9190ccf42f7d66268818dc84c37b168ab45e582e0a727796a06.json new file 
mode 100644 index 00000000..d7f20853 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-535225206622b9190ccf42f7d66268818dc84c37b168ab45e582e0a727796a06.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_update AS (\n SELECT user_session_id, last_active_at\n FROM user_sessions\n WHERE last_active_ip IS NOT NULL\n AND last_active_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR last_active_at >= $1)\n AND last_active_at < $2\n ORDER BY last_active_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n updated AS (\n UPDATE user_sessions\n SET last_active_ip = NULL\n FROM to_update\n WHERE user_sessions.user_session_id = to_update.user_session_id\n RETURNING user_sessions.last_active_at\n )\n SELECT COUNT(*) AS \"count!\", MAX(last_active_at) AS last_active_at FROM updated\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_active_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "535225206622b9190ccf42f7d66268818dc84c37b168ab45e582e0a727796a06" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-53ad718642644b47a2d49f768d81bd993088526923769a9147281686c2d47591.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-53ad718642644b47a2d49f768d81bd993088526923769a9147281686c2d47591.json new file mode 100644 index 00000000..f18a3a52 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-53ad718642644b47a2d49f768d81bd993088526923769a9147281686c2d47591.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_access_token_id\n , access_token\n , created_at\n , expires_at\n , compat_session_id\n\n FROM compat_access_tokens\n\n WHERE access_token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_access_token_id", + 
"type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "access_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "compat_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + false + ] + }, + "hash": "53ad718642644b47a2d49f768d81bd993088526923769a9147281686c2d47591" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5402b8ddb674d05319830477eb3e72ecb536092b46c92a7dda01598962842323.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5402b8ddb674d05319830477eb3e72ecb536092b46c92a7dda01598962842323.json new file mode 100644 index 00000000..ae378948 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5402b8ddb674d05319830477eb3e72ecb536092b46c92a7dda01598962842323.json @@ -0,0 +1,53 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n upstream_oauth_link_id,\n upstream_oauth_provider_id,\n user_id,\n subject,\n human_account_name,\n created_at\n FROM upstream_oauth_links\n WHERE upstream_oauth_provider_id = $1\n AND subject = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_link_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "upstream_oauth_provider_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "subject", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "human_account_name", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [ + false, + false, + true, + false, + true, + false + ] + }, + "hash": "5402b8ddb674d05319830477eb3e72ecb536092b46c92a7dda01598962842323" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-55bc51efddf7a1cf06610fdb20d46beca29964733338ea4fec2a29393f031c4f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-55bc51efddf7a1cf06610fdb20d46beca29964733338ea4fec2a29393f031c4f.json new file mode 100644 index 00000000..de0a51e4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-55bc51efddf7a1cf06610fdb20d46beca29964733338ea4fec2a29393f031c4f.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sessions\n SET last_active_at = GREATEST(t.last_active_at, compat_sessions.last_active_at)\n , last_active_ip = COALESCE(t.last_active_ip, compat_sessions.last_active_ip)\n FROM (\n SELECT *\n FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[])\n AS t(compat_session_id, last_active_at, last_active_ip)\n ) AS t\n WHERE compat_sessions.compat_session_id = t.compat_session_id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "TimestamptzArray", + "InetArray" + ] + }, + "nullable": [] + }, + "hash": "55bc51efddf7a1cf06610fdb20d46beca29964733338ea4fec2a29393f031c4f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-572ead41d62cfbe40e6f0c8edf6928e8eebd99036255b62d688ac02b5bd74b40.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-572ead41d62cfbe40e6f0c8edf6928e8eebd99036255b62d688ac02b5bd74b40.json new file mode 100644 index 00000000..15527718 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-572ead41d62cfbe40e6f0c8edf6928e8eebd99036255b62d688ac02b5bd74b40.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT s.user_session_id\n , s.created_at AS \"user_session_created_at\"\n , s.finished_at AS \"user_session_finished_at\"\n , s.user_agent AS \"user_session_user_agent\"\n , s.last_active_at AS \"user_session_last_active_at\"\n , s.last_active_ip AS \"user_session_last_active_ip: IpAddr\"\n , u.user_id\n , u.username 
AS \"user_username\"\n , u.created_at AS \"user_created_at\"\n , u.locked_at AS \"user_locked_at\"\n , u.deactivated_at AS \"user_deactivated_at\"\n , u.can_request_admin AS \"user_can_request_admin\"\n , u.is_guest AS \"user_is_guest\"\n FROM user_sessions s\n INNER JOIN users u\n USING (user_id)\n WHERE s.user_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_session_created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "user_session_finished_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "user_session_user_agent", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "user_session_last_active_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "user_session_last_active_ip: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 6, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "user_username", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "user_created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "user_locked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "user_deactivated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "user_can_request_admin", + "type_info": "Bool" + }, + { + "ordinal": 12, + "name": "user_is_guest", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + true, + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "572ead41d62cfbe40e6f0c8edf6928e8eebd99036255b62d688ac02b5bd74b40" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5a6b91660e4c12b4a1fe2cad08e727a305cbe4029cd4cebd5ecc274e3e32f533.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5a6b91660e4c12b4a1fe2cad08e727a305cbe4029cd4cebd5ecc274e3e32f533.json new 
file mode 100644 index 00000000..e1aa9740 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5a6b91660e4c12b4a1fe2cad08e727a305cbe4029cd4cebd5ecc274e3e32f533.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_authorization_sessions\n SET consumed_at = $1,\n user_session_id = $2\n WHERE upstream_oauth_authorization_session_id = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "5a6b91660e4c12b4a1fe2cad08e727a305cbe4029cd4cebd5ecc274e3e32f533" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b21644dd3c094b0f2f8babb2c730554dc067d0a6cad963dd7e0c66a80b342bf.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b21644dd3c094b0f2f8babb2c730554dc067d0a6cad963dd7e0c66a80b342bf.json new file mode 100644 index 00000000..ea5a5fb0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b21644dd3c094b0f2f8babb2c730554dc067d0a6cad963dd7e0c66a80b342bf.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_schedules\n SET last_scheduled_at = $1,\n last_scheduled_job_id = $2\n WHERE last_scheduled_job_id = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "5b21644dd3c094b0f2f8babb2c730554dc067d0a6cad963dd7e0c66a80b342bf" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b697dd7834d33ec55972d3ba43d25fe794bc0b69c5938275711faa7a80b811f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b697dd7834d33ec55972d3ba43d25fe794bc0b69c5938275711faa7a80b811f.json new file mode 100644 index 00000000..88b4d941 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5b697dd7834d33ec55972d3ba43d25fe794bc0b69c5938275711faa7a80b811f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + 
"query": "\n DELETE FROM oauth2_refresh_tokens\n WHERE oauth2_session_id IN (\n SELECT oauth2_session_id\n FROM oauth2_sessions\n WHERE oauth2_client_id = $1\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "5b697dd7834d33ec55972d3ba43d25fe794bc0b69c5938275711faa7a80b811f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5d0d4699aa82b3976c6c1fcb0d77559da26def223b8954cf32959cce777577d7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5d0d4699aa82b3976c6c1fcb0d77559da26def223b8954cf32959cce777577d7.json new file mode 100644 index 00000000..b712d9e6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5d0d4699aa82b3976c6c1fcb0d77559da26def223b8954cf32959cce777577d7.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT queue_job_id\n FROM queue_jobs\n WHERE (status = 'completed' OR status = 'failed')\n AND ($1::uuid IS NULL OR queue_job_id > $1)\n AND queue_job_id <= $2\n ORDER BY queue_job_id\n LIMIT $3\n )\n DELETE FROM queue_jobs\n USING to_delete\n WHERE queue_jobs.queue_job_id = to_delete.queue_job_id\n RETURNING queue_jobs.queue_job_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "queue_job_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "5d0d4699aa82b3976c6c1fcb0d77559da26def223b8954cf32959cce777577d7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5da7a197e0008f100ad4daa78f4aa6515f0fc9eb54075e8d6d15520d25b75172.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5da7a197e0008f100ad4daa78f4aa6515f0fc9eb54075e8d6d15520d25b75172.json new file mode 100644 index 00000000..56298e4d --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5da7a197e0008f100ad4daa78f4aa6515f0fc9eb54075e8d6d15520d25b75172.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_sessions\n SET finished_at = $4\n WHERE user_id = $1\n AND ($2 = ANY(scope_list) OR $3 = ANY(scope_list))\n AND finished_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "5da7a197e0008f100ad4daa78f4aa6515f0fc9eb54075e8d6d15520d25b75172" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5eea2f4c3e82ae606b09b8a81332594c97ba0afe972f0fee145b6094789fb6c7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5eea2f4c3e82ae606b09b8a81332594c97ba0afe972f0fee145b6094789fb6c7.json new file mode 100644 index 00000000..3d81b914 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5eea2f4c3e82ae606b09b8a81332594c97ba0afe972f0fee145b6094789fb6c7.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_id\n , user_id\n , email\n , created_at\n FROM user_emails\n WHERE LOWER(email) = LOWER($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "5eea2f4c3e82ae606b09b8a81332594c97ba0afe972f0fee145b6094789fb6c7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5f2199865fae3a969bb37429dd70dc74505b22c681322bd99b62c2a540c6cd35.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5f2199865fae3a969bb37429dd70dc74505b22c681322bd99b62c2a540c6cd35.json 
new file mode 100644 index 00000000..364a1c6b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5f2199865fae3a969bb37429dd70dc74505b22c681322bd99b62c2a540c6cd35.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_workers\n SET shutdown_at = $2\n WHERE queue_worker_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "5f2199865fae3a969bb37429dd70dc74505b22c681322bd99b62c2a540c6cd35" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-5fe1bb569d13a7d3ff22887b3fc5b76ff901c183b314f8ccb5018d70c516abf6.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5fe1bb569d13a7d3ff22887b3fc5b76ff901c183b314f8ccb5018d70c516abf6.json new file mode 100644 index 00000000..864809d6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-5fe1bb569d13a7d3ff22887b3fc5b76ff901c183b314f8ccb5018d70c516abf6.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM oauth2_clients\n WHERE oauth2_client_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "5fe1bb569d13a7d3ff22887b3fc5b76ff901c183b314f8ccb5018d70c516abf6" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-607262ccf28b672df51e4e5d371e5cc5119a7d6e7fe784112703c0406f28300f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-607262ccf28b672df51e4e5d371e5cc5119a7d6e7fe784112703c0406f28300f.json new file mode 100644 index 00000000..3efb2e7f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-607262ccf28b672df51e4e5d371e5cc5119a7d6e7fe784112703c0406f28300f.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n user_recovery_ticket_id\n , user_recovery_session_id\n , user_email_id\n , ticket\n , created_at\n , expires_at\n FROM user_recovery_tickets\n 
WHERE ticket = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_recovery_ticket_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_recovery_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_email_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "ticket", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "expires_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "607262ccf28b672df51e4e5d371e5cc5119a7d6e7fe784112703c0406f28300f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-608366f45ecaf392ab69cddb12252b5efcc103c3383fa68b552295e2289d1f55.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-608366f45ecaf392ab69cddb12252b5efcc103c3383fa68b552295e2289d1f55.json new file mode 100644 index 00000000..4076e098 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-608366f45ecaf392ab69cddb12252b5efcc103c3383fa68b552295e2289d1f55.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_session_authentications\n (user_session_authentication_id, user_session_id, created_at, user_password_id)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "608366f45ecaf392ab69cddb12252b5efcc103c3383fa68b552295e2289d1f55" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-623097fc45ffa5d6e09fedfbdbe5e42662e9854430bcd9e53598debf99c9ca37.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-623097fc45ffa5d6e09fedfbdbe5e42662e9854430bcd9e53598debf99c9ca37.json new file mode 100644 index 00000000..27b69adf --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-623097fc45ffa5d6e09fedfbdbe5e42662e9854430bcd9e53598debf99c9ca37.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT upstream_oauth_link_id\n FROM upstream_oauth_links\n WHERE user_id IS NULL\n AND ($1::uuid IS NULL OR upstream_oauth_link_id > $1)\n AND upstream_oauth_link_id <= $2\n ORDER BY upstream_oauth_link_id\n LIMIT $3\n ),\n deleted_sessions AS (\n DELETE FROM upstream_oauth_authorization_sessions\n USING to_delete\n WHERE upstream_oauth_authorization_sessions.upstream_oauth_link_id = to_delete.upstream_oauth_link_id\n )\n DELETE FROM upstream_oauth_links\n USING to_delete\n WHERE upstream_oauth_links.upstream_oauth_link_id = to_delete.upstream_oauth_link_id\n RETURNING upstream_oauth_links.upstream_oauth_link_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_link_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "623097fc45ffa5d6e09fedfbdbe5e42662e9854430bcd9e53598debf99c9ca37" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-64b6e274e2bed6814f5ae41ddf57093589f7d1b2b8458521b635546b8012041e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-64b6e274e2bed6814f5ae41ddf57093589f7d1b2b8458521b635546b8012041e.json new file mode 100644 index 00000000..6b2e85bf --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-64b6e274e2bed6814f5ae41ddf57093589f7d1b2b8458521b635546b8012041e.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE personal_sessions\n SET last_active_at = GREATEST(t.last_active_at, personal_sessions.last_active_at)\n , last_active_ip = COALESCE(t.last_active_ip, personal_sessions.last_active_ip)\n FROM (\n SELECT *\n FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[])\n AS t(personal_session_id, last_active_at, 
last_active_ip)\n ) AS t\n WHERE personal_sessions.personal_session_id = t.personal_session_id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "TimestamptzArray", + "InetArray" + ] + }, + "nullable": [] + }, + "hash": "64b6e274e2bed6814f5ae41ddf57093589f7d1b2b8458521b635546b8012041e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json new file mode 100644 index 00000000..6bd2768c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json @@ -0,0 +1,166 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint,\n on_backchannel_logout\n FROM upstream_oauth_providers\n WHERE upstream_oauth_provider_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_provider_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "issuer", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "human_name", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "brand_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "client_id", + "type_info": 
"Text" + }, + { + "ordinal": 6, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "token_endpoint_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "fetch_userinfo", + "type_info": "Bool" + }, + { + "ordinal": 11, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 13, + "name": "disabled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 14, + "name": "claims_imports: Json", + "type_info": "Jsonb" + }, + { + "ordinal": 15, + "name": "jwks_uri_override", + "type_info": "Text" + }, + { + "ordinal": 16, + "name": "authorization_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "token_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "userinfo_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "discovery_mode", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "pkce_mode", + "type_info": "Text" + }, + { + "ordinal": 21, + "name": "response_mode", + "type_info": "Text" + }, + { + "ordinal": 22, + "name": "additional_parameters: Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 23, + "name": "forward_login_hint", + "type_info": "Bool" + }, + { + "ordinal": 24, + "name": "on_backchannel_logout", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + true, + true, + false, + false, + false, + true, + false, + true, + false, + true, + true, + true, + true, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-66693f31eff5673e88ca516ee727a709b06455e08b9fd75cc08f142070f330b3.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-66693f31eff5673e88ca516ee727a709b06455e08b9fd75cc08f142070f330b3.json new file mode 100644 index 00000000..716bbf8c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-66693f31eff5673e88ca516ee727a709b06455e08b9fd75cc08f142070f330b3.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_refresh_tokens\n SET revoked_at = $2\n WHERE oauth2_refresh_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "66693f31eff5673e88ca516ee727a709b06455e08b9fd75cc08f142070f330b3" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-67cd4880d84b38f20c3960789934d55cbfb01492985ac2af5a1ad4af9b3ccc77.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-67cd4880d84b38f20c3960789934d55cbfb01492985ac2af5a1ad4af9b3ccc77.json new file mode 100644 index 00000000..1d739df3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-67cd4880d84b38f20c3960789934d55cbfb01492985ac2af5a1ad4af9b3ccc77.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_leader (elected_at, expires_at, queue_worker_id)\n VALUES ($1, NOW() + INTERVAL '5 seconds', $2)\n ON CONFLICT (active)\n DO UPDATE SET expires_at = EXCLUDED.expires_at\n WHERE queue_leader.queue_worker_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "67cd4880d84b38f20c3960789934d55cbfb01492985ac2af5a1ad4af9b3ccc77" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6b8d28b76d7ab33178b46dbb28c11e41d86f22b3fa899a952cad00129e59bee6.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6b8d28b76d7ab33178b46dbb28c11e41d86f22b3fa899a952cad00129e59bee6.json new file mode 100644 index 00000000..a7b95fc9 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6b8d28b76d7ab33178b46dbb28c11e41d86f22b3fa899a952cad00129e59bee6.json @@ -0,0 +1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_session_id\n , user_id\n , user_session_id\n , oauth2_client_id\n , scope_list\n , created_at\n , finished_at\n , user_agent\n , last_active_at\n , last_active_ip as \"last_active_ip: IpAddr\"\n , human_name\n FROM oauth2_sessions\n\n WHERE oauth2_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 4, + "name": "scope_list", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "finished_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "user_agent", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "last_active_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "last_active_ip: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 10, + "name": "human_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + false, + false, + false, + true, + true, + true, + true, + true + ] + }, + "hash": "6b8d28b76d7ab33178b46dbb28c11e41d86f22b3fa899a952cad00129e59bee6" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6bd38759f569fcf972924d12f565b531b9873f4139eadcbf1450e726b9a27379.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6bd38759f569fcf972924d12f565b531b9873f4139eadcbf1450e726b9a27379.json new file mode 100644 index 00000000..4898fc43 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6bd38759f569fcf972924d12f565b531b9873f4139eadcbf1450e726b9a27379.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_workers\n SET shutdown_at = $1\n WHERE shutdown_at IS NULL\n AND last_seen_at < $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "6bd38759f569fcf972924d12f565b531b9873f4139eadcbf1450e726b9a27379" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6d71188dffc492ddc8f7f21476516d3b08fd5d736ecf36845e6fd4bfc515b2cf.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6d71188dffc492ddc8f7f21476516d3b08fd5d736ecf36845e6fd4bfc515b2cf.json new file mode 100644 index 00000000..1acfa278 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6d71188dffc492ddc8f7f21476516d3b08fd5d736ecf36845e6fd4bfc515b2cf.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_refresh_token_id\n , refresh_token\n , created_at\n , consumed_at\n , revoked_at\n , oauth2_access_token_id\n , oauth2_session_id\n , next_oauth2_refresh_token_id\n FROM oauth2_refresh_tokens\n\n WHERE oauth2_refresh_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_refresh_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "refresh_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "consumed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "oauth2_access_token_id", + "type_info": "Uuid" + }, + { + 
"ordinal": 6, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "next_oauth2_refresh_token_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + true + ] + }, + "hash": "6d71188dffc492ddc8f7f21476516d3b08fd5d736ecf36845e6fd4bfc515b2cf" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6db23fc9c39c2c7d9224d4e1233205f636568c990ccb05cf9208750ad1330b9b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6db23fc9c39c2c7d9224d4e1233205f636568c990ccb05cf9208750ad1330b9b.json new file mode 100644 index 00000000..7de368da --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6db23fc9c39c2c7d9224d4e1233205f636568c990ccb05cf9208750ad1330b9b.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT upstream_oauth_authorization_session_id\n FROM upstream_oauth_authorization_sessions\n WHERE ($1::uuid IS NULL OR upstream_oauth_authorization_session_id > $1)\n AND upstream_oauth_authorization_session_id <= $2\n AND user_session_id IS NULL\n ORDER BY upstream_oauth_authorization_session_id\n LIMIT $3\n )\n DELETE FROM upstream_oauth_authorization_sessions\n USING to_delete\n WHERE upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id = to_delete.upstream_oauth_authorization_session_id\n RETURNING upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_authorization_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "6db23fc9c39c2c7d9224d4e1233205f636568c990ccb05cf9208750ad1330b9b" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6e21e7d816f806da9bb5176931bdb550dee05c44c9d93f53df95fe3b4a840347.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6e21e7d816f806da9bb5176931bdb550dee05c44c9d93f53df95fe3b4a840347.json new file mode 100644 index 00000000..225baae3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6e21e7d816f806da9bb5176931bdb550dee05c44c9d93f53df95fe3b4a840347.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO compat_sso_logins\n (compat_sso_login_id, login_token, redirect_uri, created_at)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "6e21e7d816f806da9bb5176931bdb550dee05c44c9d93f53df95fe3b4a840347" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6ecad60e565367a6cfa539b4c32dabe674ea853e0d47eb5c713705cb0130c758.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6ecad60e565367a6cfa539b4c32dabe674ea853e0d47eb5c713705cb0130c758.json new file mode 100644 index 00000000..564800a2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6ecad60e565367a6cfa539b4c32dabe674ea853e0d47eb5c713705cb0130c758.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\n NOTIFY queue_leader_stepdown\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "6ecad60e565367a6cfa539b4c32dabe674ea853e0d47eb5c713705cb0130c758" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-6f97b5f9ad0d4d15387150bea3839fb7f81015f7ceef61ecaadba64521895cff.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6f97b5f9ad0d4d15387150bea3839fb7f81015f7ceef61ecaadba64521895cff.json new file mode 100644 index 00000000..88398615 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-6f97b5f9ad0d4d15387150bea3839fb7f81015f7ceef61ecaadba64521895cff.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_passwords\n (user_password_id, user_id, hashed_password, version, upgraded_from_id, created_at)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Int4", + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "6f97b5f9ad0d4d15387150bea3839fb7f81015f7ceef61ecaadba64521895cff" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-707d78340069627aba9f18bbe5ac1388d6723f82549d88d704d9c939b9d35c49.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-707d78340069627aba9f18bbe5ac1388d6723f82549d88d704d9c939b9d35c49.json new file mode 100644 index 00000000..88eb81f9 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-707d78340069627aba9f18bbe5ac1388d6723f82549d88d704d9c939b9d35c49.json @@ -0,0 +1,49 @@ +{ + "db_name": "PostgreSQL", + "query": "\n -- We first grab a few jobs that are available,\n -- using a FOR UPDATE SKIP LOCKED so that this can be run concurrently\n -- and we don't get multiple workers grabbing the same jobs\n WITH locked_jobs AS (\n SELECT queue_job_id\n FROM queue_jobs\n WHERE\n status = 'available'\n AND queue_name = ANY($1)\n ORDER BY queue_job_id ASC\n LIMIT $2\n FOR UPDATE\n SKIP LOCKED\n )\n -- then we update the status of those jobs to 'running', returning the job details\n UPDATE queue_jobs\n SET status = 'running', started_at = $3, started_by = $4\n FROM locked_jobs\n WHERE queue_jobs.queue_job_id = locked_jobs.queue_job_id\n RETURNING\n queue_jobs.queue_job_id,\n queue_jobs.queue_name,\n queue_jobs.payload,\n queue_jobs.metadata,\n queue_jobs.attempt\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "queue_job_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + 
"name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "payload", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "metadata", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "attempt", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "TextArray", + "Int8", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "707d78340069627aba9f18bbe5ac1388d6723f82549d88d704d9c939b9d35c49" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7189b6136fd08ac9ae7c51bff06fb2254d1bf9e8a97cd7d32ba789c740e0fbdb.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7189b6136fd08ac9ae7c51bff06fb2254d1bf9e8a97cd7d32ba789c740e0fbdb.json new file mode 100644 index 00000000..a0695db4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7189b6136fd08ac9ae7c51bff06fb2254d1bf9e8a97cd7d32ba789c740e0fbdb.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_access_tokens\n SET first_used_at = $2\n WHERE oauth2_access_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "7189b6136fd08ac9ae7c51bff06fb2254d1bf9e8a97cd7d32ba789c740e0fbdb" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-755f62d0a3a40acc90037371339a8459736fdd4bbffd932f7930d847f2c3ef5d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-755f62d0a3a40acc90037371339a8459736fdd4bbffd932f7930d847f2c3ef5d.json new file mode 100644 index 00000000..3254f116 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-755f62d0a3a40acc90037371339a8459736fdd4bbffd932f7930d847f2c3ef5d.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_device_code_grant\n SET rejected_at = $1\n , user_session_id = $2\n WHERE oauth2_device_code_grant_id = $3\n ", + "describe": 
{ + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "755f62d0a3a40acc90037371339a8459736fdd4bbffd932f7930d847f2c3ef5d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-75a62d170e4c959a14c5698f1da983113e7d1bc565d01e85c158856abb17ddc6.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-75a62d170e4c959a14c5698f1da983113e7d1bc565d01e85c158856abb17ddc6.json new file mode 100644 index 00000000..1a1320b7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-75a62d170e4c959a14c5698f1da983113e7d1bc565d01e85c158856abb17ddc6.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_device_code_grant_id\n , oauth2_client_id\n , scope\n , device_code\n , user_code\n , created_at\n , expires_at\n , fulfilled_at\n , rejected_at\n , exchanged_at\n , user_session_id\n , oauth2_session_id\n , ip_address as \"ip_address: IpAddr\"\n , user_agent\n FROM\n oauth2_device_code_grant\n\n WHERE device_code = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_device_code_grant_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "device_code", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "user_code", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "rejected_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 11, + "name": "oauth2_session_id", + 
"type_info": "Uuid" + }, + { + "ordinal": 12, + "name": "ip_address: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 13, + "name": "user_agent", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "75a62d170e4c959a14c5698f1da983113e7d1bc565d01e85c158856abb17ddc6" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-77dfa9fae1a9c77b70476d7da19d3313a02886994cfff0690451229fb5ae2f77.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-77dfa9fae1a9c77b70476d7da19d3313a02886994cfff0690451229fb5ae2f77.json new file mode 100644 index 00000000..d48d5572 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-77dfa9fae1a9c77b70476d7da19d3313a02886994cfff0690451229fb5ae2f77.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_access_token_id\n , access_token\n , created_at\n , expires_at\n , compat_session_id\n\n FROM compat_access_tokens\n\n WHERE compat_access_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "access_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "compat_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + false + ] + }, + "hash": "77dfa9fae1a9c77b70476d7da19d3313a02886994cfff0690451229fb5ae2f77" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-785e6bceed803cb1caccc373cde0c999d601f3a9730e6bbb40cfc43c04195c61.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-785e6bceed803cb1caccc373cde0c999d601f3a9730e6bbb40cfc43c04195c61.json new file mode 100644 index 00000000..f09603b2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-785e6bceed803cb1caccc373cde0c999d601f3a9730e6bbb40cfc43c04195c61.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n upstream_oauth_link_id,\n upstream_oauth_provider_id,\n user_id,\n subject,\n human_account_name,\n created_at\n FROM upstream_oauth_links\n WHERE upstream_oauth_link_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_link_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "upstream_oauth_provider_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "subject", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "human_account_name", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + false, + true, + false + ] + }, + "hash": "785e6bceed803cb1caccc373cde0c999d601f3a9730e6bbb40cfc43c04195c61" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7a0641df5058927c5cd67d4cdaa59fe609112afbabcbfcc0e7f96c1e531b6567.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7a0641df5058927c5cd67d4cdaa59fe609112afbabcbfcc0e7f96c1e531b6567.json new file mode 100644 index 00000000..22c3bc0e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7a0641df5058927c5cd67d4cdaa59fe609112afbabcbfcc0e7f96c1e531b6567.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_authorization_grants (\n oauth2_authorization_grant_id,\n oauth2_client_id,\n redirect_uri,\n scope,\n state,\n nonce,\n response_mode,\n code_challenge,\n 
code_challenge_method,\n response_type_code,\n response_type_id_token,\n authorization_code,\n login_hint,\n locale,\n created_at\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Bool", + "Bool", + "Text", + "Text", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "7a0641df5058927c5cd67d4cdaa59fe609112afbabcbfcc0e7f96c1e531b6567" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7b06e6f21c69056b526538f06f06268efd13d7af3cecb452168d514a379fec30.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7b06e6f21c69056b526538f06f06268efd13d7af3cecb452168d514a379fec30.json new file mode 100644 index 00000000..a2c99a75 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7b06e6f21c69056b526538f06f06268efd13d7af3cecb452168d514a379fec30.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_update AS (\n SELECT oauth2_session_id, last_active_at\n FROM oauth2_sessions\n WHERE last_active_ip IS NOT NULL\n AND last_active_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR last_active_at >= $1)\n AND last_active_at < $2\n ORDER BY last_active_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n updated AS (\n UPDATE oauth2_sessions\n SET last_active_ip = NULL\n FROM to_update\n WHERE oauth2_sessions.oauth2_session_id = to_update.oauth2_session_id\n RETURNING oauth2_sessions.last_active_at\n )\n SELECT COUNT(*) AS \"count!\", MAX(last_active_at) AS last_active_at FROM updated\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_active_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": 
"7b06e6f21c69056b526538f06f06268efd13d7af3cecb452168d514a379fec30" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7ce387b1b0aaf10e72adde667b19521b66eaafa51f73bf2f95e38b8f3b64a229.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7ce387b1b0aaf10e72adde667b19521b66eaafa51f73bf2f95e38b8f3b64a229.json new file mode 100644 index 00000000..464d6eda --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7ce387b1b0aaf10e72adde667b19521b66eaafa51f73bf2f95e38b8f3b64a229.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_links\n SET user_id = $1\n WHERE upstream_oauth_link_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "7ce387b1b0aaf10e72adde667b19521b66eaafa51f73bf2f95e38b8f3b64a229" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e367e416d18fcf9b227bf053421410b4b7b4af441f0a138c5421d1111cb9f79.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e367e416d18fcf9b227bf053421410b4b7b4af441f0a138c5421d1111cb9f79.json new file mode 100644 index 00000000..a6a02b32 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e367e416d18fcf9b227bf053421410b4b7b4af441f0a138c5421d1111cb9f79.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_authentication_id\n , user_session_id\n , user_registration_id\n , email\n , created_at\n , completed_at\n FROM user_email_authentications\n WHERE user_email_authentication_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_authentication_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_registration_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": 
"created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "completed_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + false, + false, + true + ] + }, + "hash": "7e367e416d18fcf9b227bf053421410b4b7b4af441f0a138c5421d1111cb9f79" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e414c29745cf5c85fa4e7cb5d661b07f43ab168956470d120166ed7eab631d9.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e414c29745cf5c85fa4e7cb5d661b07f43ab168956470d120166ed7eab631d9.json new file mode 100644 index 00000000..275a0895 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7e414c29745cf5c85fa4e7cb5d661b07f43ab168956470d120166ed7eab631d9.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registration_tokens\n SET usage_limit = $2\n WHERE user_registration_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "7e414c29745cf5c85fa4e7cb5d661b07f43ab168956470d120166ed7eab631d9" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f4c4634ada4dc2745530dcca8eee92abf78dfbdf1a25e58a2bc9c14be8035f0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f4c4634ada4dc2745530dcca8eee92abf78dfbdf1a25e58a2bc9c14be8035f0.json new file mode 100644 index 00000000..e9af498f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f4c4634ada4dc2745530dcca8eee92abf78dfbdf1a25e58a2bc9c14be8035f0.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO users (user_id, username, created_at)\n VALUES ($1, $2, $3)\n ON CONFLICT (username) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": 
"7f4c4634ada4dc2745530dcca8eee92abf78dfbdf1a25e58a2bc9c14be8035f0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f8335cc94347bc3a15afe7051658659347a1bf71dd62335df046708f19c967e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f8335cc94347bc3a15afe7051658659347a1bf71dd62335df046708f19c967e.json new file mode 100644 index 00000000..9567c155 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-7f8335cc94347bc3a15afe7051658659347a1bf71dd62335df046708f19c967e.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT EXISTS(\n SELECT 1 FROM users WHERE LOWER(username) = LOWER($1)\n ) AS \"exists!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null + ] + }, + "hash": "7f8335cc94347bc3a15afe7051658659347a1bf71dd62335df046708f19c967e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8275a440640ea28fd8f82e7df672e45a6eba981a0d621665ed8f8b60354b3389.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8275a440640ea28fd8f82e7df672e45a6eba981a0d621665ed8f8b60354b3389.json new file mode 100644 index 00000000..ee2e27cd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8275a440640ea28fd8f82e7df672e45a6eba981a0d621665ed8f8b60354b3389.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_recovery_sessions (\n user_recovery_session_id\n , email\n , user_agent\n , ip_address\n , locale\n , created_at\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Inet", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "8275a440640ea28fd8f82e7df672e45a6eba981a0d621665ed8f8b60354b3389" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-83d1b0720dfde3209d77f1142aa19359913b8a934ca8a642b7bb43c9a7a58a6d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-83d1b0720dfde3209d77f1142aa19359913b8a934ca8a642b7bb43c9a7a58a6d.json new file mode 100644 index 00000000..a5899aa2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-83d1b0720dfde3209d77f1142aa19359913b8a934ca8a642b7bb43c9a7a58a6d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET completed_at = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "83d1b0720dfde3209d77f1142aa19359913b8a934ca8a642b7bb43c9a7a58a6d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-860e01cd660b450439d63c5ee31ade59f478b0b096b4bc90c89fb9c26b467dd2.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-860e01cd660b450439d63c5ee31ade59f478b0b096b4bc90c89fb9c26b467dd2.json new file mode 100644 index 00000000..a8909316 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-860e01cd660b450439d63c5ee31ade59f478b0b096b4bc90c89fb9c26b467dd2.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET user_registration_token_id = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "860e01cd660b450439d63c5ee31ade59f478b0b096b4bc90c89fb9c26b467dd2" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-875294dc5cf87bcf302fb9e87933745cc1c57bbe3c3c69110592a07400116c7f.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-875294dc5cf87bcf302fb9e87933745cc1c57bbe3c3c69110592a07400116c7f.json new file mode 100644 index 
00000000..b82d7c46 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-875294dc5cf87bcf302fb9e87933745cc1c57bbe3c3c69110592a07400116c7f.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_access_token_id\n , access_token\n , created_at\n , expires_at\n , revoked_at\n , oauth2_session_id\n , first_used_at\n\n FROM oauth2_access_tokens\n\n WHERE oauth2_access_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "access_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "first_used_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + true + ] + }, + "hash": "875294dc5cf87bcf302fb9e87933745cc1c57bbe3c3c69110592a07400116c7f" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-89041298e272d15c21e2b7127bd16c5a4f48e2be87dc26e9d0e3a932c9c49dfb.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-89041298e272d15c21e2b7127bd16c5a4f48e2be87dc26e9d0e3a932c9c49dfb.json new file mode 100644 index 00000000..4b11059e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-89041298e272d15c21e2b7127bd16c5a4f48e2be87dc26e9d0e3a932c9c49dfb.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_device_code_grant_id\n , oauth2_client_id\n , scope\n , device_code\n , user_code\n , created_at\n , expires_at\n , fulfilled_at\n , rejected_at\n , exchanged_at\n , user_session_id\n , oauth2_session_id\n , ip_address 
as \"ip_address: IpAddr\"\n , user_agent\n FROM\n oauth2_device_code_grant\n\n WHERE oauth2_device_code_grant_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_device_code_grant_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "device_code", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "user_code", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "rejected_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 11, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 12, + "name": "ip_address: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 13, + "name": "user_agent", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "89041298e272d15c21e2b7127bd16c5a4f48e2be87dc26e9d0e3a932c9c49dfb" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-89edaec8661e435c3b71bb9b995cd711eb78a4d39608e897432d6124cd135938.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-89edaec8661e435c3b71bb9b995cd711eb78a4d39608e897432d6124cd135938.json new file mode 100644 index 00000000..f04a39a7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-89edaec8661e435c3b71bb9b995cd711eb78a4d39608e897432d6124cd135938.json @@ -0,0 +1,18 @@ +{ + 
"db_name": "PostgreSQL", + "query": "\n INSERT INTO user_registration_tokens\n (user_registration_token_id, token, usage_limit, created_at, expires_at)\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Int4", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "89edaec8661e435c3b71bb9b995cd711eb78a4d39608e897432d6124cd135938" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8acbdc892d44efb53529da1c2df65bea6b799a43cf4c9264a37d392847e6eff0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8acbdc892d44efb53529da1c2df65bea6b799a43cf4c9264a37d392847e6eff0.json new file mode 100644 index 00000000..7cf1c4f6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8acbdc892d44efb53529da1c2df65bea6b799a43cf4c9264a37d392847e6eff0.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM oauth2_sessions\n WHERE oauth2_client_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "8acbdc892d44efb53529da1c2df65bea6b799a43cf4c9264a37d392847e6eff0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8afada5220fefb0d01ed6f87d3d0ee8fca86b5cdce9320e190e3d3b8fd9f63bc.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8afada5220fefb0d01ed6f87d3d0ee8fca86b5cdce9320e190e3d3b8fd9f63bc.json new file mode 100644 index 00000000..44352005 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8afada5220fefb0d01ed6f87d3d0ee8fca86b5cdce9320e190e3d3b8fd9f63bc.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_sessions\n SET human_name = $2\n WHERE oauth2_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": 
"8afada5220fefb0d01ed6f87d3d0ee8fca86b5cdce9320e190e3d3b8fd9f63bc" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8d240d72d651f59d53bed7380710038e9d00492b1e282237c0ec0e03bc36a9c0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8d240d72d651f59d53bed7380710038e9d00492b1e282237c0ec0e03bc36a9c0.json new file mode 100644 index 00000000..00f736ab --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8d240d72d651f59d53bed7380710038e9d00492b1e282237c0ec0e03bc36a9c0.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_registrations\n ( user_registration_id\n , ip_address\n , user_agent\n , post_auth_action\n , username\n , created_at\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Inet", + "Text", + "Jsonb", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "8d240d72d651f59d53bed7380710038e9d00492b1e282237c0ec0e03bc36a9c0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef27901b96b73826a431ad6c5fabecc18c36d8cdba8db3b47953855fa5c9035.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef27901b96b73826a431ad6c5fabecc18c36d8cdba8db3b47953855fa5c9035.json new file mode 100644 index 00000000..0a5d83f0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef27901b96b73826a431ad6c5fabecc18c36d8cdba8db3b47953855fa5c9035.json @@ -0,0 +1,130 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_authorization_grant_id\n , created_at\n , cancelled_at\n , fulfilled_at\n , exchanged_at\n , scope\n , state\n , redirect_uri\n , response_mode\n , nonce\n , oauth2_client_id\n , authorization_code\n , response_type_code\n , response_type_id_token\n , code_challenge\n , code_challenge_method\n , login_hint\n , locale\n , oauth2_session_id\n FROM\n oauth2_authorization_grants\n\n WHERE authorization_code = $1\n ", + 
"describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_authorization_grant_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "cancelled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "state", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "redirect_uri", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "response_mode", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "nonce", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 11, + "name": "authorization_code", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "response_type_code", + "type_info": "Bool" + }, + { + "ordinal": 13, + "name": "response_type_id_token", + "type_info": "Bool" + }, + { + "ordinal": 14, + "name": "code_challenge", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "code_challenge_method", + "type_info": "Text" + }, + { + "ordinal": 16, + "name": "login_hint", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "locale", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "oauth2_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + false, + true, + false, + false, + true, + false, + true, + false, + false, + true, + true, + true, + true, + true + ] + }, + "hash": "8ef27901b96b73826a431ad6c5fabecc18c36d8cdba8db3b47953855fa5c9035" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef977487429f84c557dc62272c47e411b96b2376288a90c242034295e1a147e.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef977487429f84c557dc62272c47e411b96b2376288a90c242034295e1a147e.json new file mode 100644 index 00000000..aa2d4a1c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8ef977487429f84c557dc62272c47e411b96b2376288a90c242034295e1a147e.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT user_recovery_session_id\n FROM user_recovery_sessions\n WHERE ($1::uuid IS NULL OR user_recovery_session_id > $1)\n AND user_recovery_session_id <= $2\n ORDER BY user_recovery_session_id\n LIMIT $3\n )\n DELETE FROM user_recovery_sessions\n USING to_delete\n WHERE user_recovery_sessions.user_recovery_session_id = to_delete.user_recovery_session_id\n RETURNING user_recovery_sessions.user_recovery_session_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_recovery_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "8ef977487429f84c557dc62272c47e411b96b2376288a90c242034295e1a147e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f4f071f844281fb14ecd99db3261540441b14c8206038fdc4a4336bbae3f382.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f4f071f844281fb14ecd99db3261540441b14c8206038fdc4a4336bbae3f382.json new file mode 100644 index 00000000..304e477e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f4f071f844281fb14ecd99db3261540441b14c8206038fdc4a4336bbae3f382.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_jobs\n (queue_job_id, queue_name, payload, metadata, created_at,\n attempt, scheduled_at, schedule_name, status)\n SELECT $1, queue_name, payload, metadata, $2, attempt + 1, $3, schedule_name, 'scheduled'\n FROM queue_jobs\n WHERE queue_job_id = $4\n AND status = 'failed'\n ", + "describe": { + "columns": [], + 
"parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "8f4f071f844281fb14ecd99db3261540441b14c8206038fdc4a4336bbae3f382" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f5ce493e8b8473ba03d5263915a8b231f9e7c211ab83487536008e48316c269.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f5ce493e8b8473ba03d5263915a8b231f9e7c211ab83487536008e48316c269.json new file mode 100644 index 00000000..96974804 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-8f5ce493e8b8473ba03d5263915a8b231f9e7c211ab83487536008e48316c269.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET display_name = $2\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "8f5ce493e8b8473ba03d5263915a8b231f9e7c211ab83487536008e48316c269" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-90875bdd2f75cdf0dc3f48dc2516f5c701411387c939f6b8a3478b41b3de4f20.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-90875bdd2f75cdf0dc3f48dc2516f5c701411387c939f6b8a3478b41b3de4f20.json new file mode 100644 index 00000000..66aab4ee --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-90875bdd2f75cdf0dc3f48dc2516f5c701411387c939f6b8a3478b41b3de4f20.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT personal_access_token_id\n , personal_session_id\n , created_at\n , expires_at\n , revoked_at\n\n FROM personal_access_tokens\n\n WHERE access_token_sha256 = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "personal_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "personal_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": 
"Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false, + false, + true, + true + ] + }, + "hash": "90875bdd2f75cdf0dc3f48dc2516f5c701411387c939f6b8a3478b41b3de4f20" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-90fe32cb9c88a262a682c0db700fef7d69d6ce0be1f930d9f16c50b921a8b819.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-90fe32cb9c88a262a682c0db700fef7d69d6ce0be1f930d9f16c50b921a8b819.json new file mode 100644 index 00000000..a9d19cac --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-90fe32cb9c88a262a682c0db700fef7d69d6ce0be1f930d9f16c50b921a8b819.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_emails (user_email_id, user_id, email, created_at)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "90fe32cb9c88a262a682c0db700fef7d69d6ce0be1f930d9f16c50b921a8b819" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-91a3ee5ad64a947b7807a590f6b014c6856229918b972b98946f98b75686ab6c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-91a3ee5ad64a947b7807a590f6b014c6856229918b972b98946f98b75686ab6c.json new file mode 100644 index 00000000..51193254 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-91a3ee5ad64a947b7807a590f6b014c6856229918b972b98946f98b75686ab6c.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM upstream_oauth_providers\n WHERE upstream_oauth_provider_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": 
"91a3ee5ad64a947b7807a590f6b014c6856229918b972b98946f98b75686ab6c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-926cb81dc7931890a02c5a372aef79832e5d0748dad18ab44c6671f3196d6f60.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-926cb81dc7931890a02c5a372aef79832e5d0748dad18ab44c6671f3196d6f60.json new file mode 100644 index 00000000..4aaa3523 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-926cb81dc7931890a02c5a372aef79832e5d0748dad18ab44c6671f3196d6f60.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_update AS (\n SELECT compat_session_id, last_active_at\n FROM compat_sessions\n WHERE last_active_ip IS NOT NULL\n AND last_active_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR last_active_at >= $1)\n AND last_active_at < $2\n ORDER BY last_active_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n updated AS (\n UPDATE compat_sessions\n SET last_active_ip = NULL\n FROM to_update\n WHERE compat_sessions.compat_session_id = to_update.compat_session_id\n RETURNING compat_sessions.last_active_at\n )\n SELECT COUNT(*) AS \"count!\", MAX(last_active_at) AS last_active_at FROM updated\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_active_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "926cb81dc7931890a02c5a372aef79832e5d0748dad18ab44c6671f3196d6f60" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-92c8eb526fcc5de6874eb0fab1d71fb1ed3dafe2bd1a49aa72e4f4862931c6c2.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-92c8eb526fcc5de6874eb0fab1d71fb1ed3dafe2bd1a49aa72e4f4862931c6c2.json new file mode 100644 index 00000000..34ac29db --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-92c8eb526fcc5de6874eb0fab1d71fb1ed3dafe2bd1a49aa72e4f4862931c6c2.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_device_code_grant\n SET exchanged_at = $1\n , oauth2_session_id = $2\n WHERE oauth2_device_code_grant_id = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "92c8eb526fcc5de6874eb0fab1d71fb1ed3dafe2bd1a49aa72e4f4862931c6c2" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-933d2bed9c00eb9b37bfe757266ead15df5e0a4209ff47dcf4a5f19d35154e89.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-933d2bed9c00eb9b37bfe757266ead15df5e0a4209ff47dcf4a5f19d35154e89.json new file mode 100644 index 00000000..8a040439 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-933d2bed9c00eb9b37bfe757266ead15df5e0a4209ff47dcf4a5f19d35154e89.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_sso_login_id\n , login_token\n , redirect_uri\n , created_at\n , fulfilled_at\n , exchanged_at\n , compat_session_id\n , user_session_id\n\n FROM compat_sso_logins\n WHERE compat_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_sso_login_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "login_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "redirect_uri", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "user_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + 
false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "933d2bed9c00eb9b37bfe757266ead15df5e0a4209ff47dcf4a5f19d35154e89" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-966ca0f7eebd2896c007b2fd6e9327d03b29fe413d57cce21c67b6d539f59e7d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-966ca0f7eebd2896c007b2fd6e9327d03b29fe413d57cce21c67b6d539f59e7d.json new file mode 100644 index 00000000..3e1fb358 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-966ca0f7eebd2896c007b2fd6e9327d03b29fe413d57cce21c67b6d539f59e7d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_workers\n SET last_seen_at = $2\n WHERE queue_worker_id = $1 AND shutdown_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "966ca0f7eebd2896c007b2fd6e9327d03b29fe413d57cce21c67b6d539f59e7d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-98a5491eb5f10997ac1f3718c835903ac99d9bb8ca4d79c908b25a6d1209b9b1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-98a5491eb5f10997ac1f3718c835903ac99d9bb8ca4d79c908b25a6d1209b9b1.json new file mode 100644 index 00000000..75f013b5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-98a5491eb5f10997ac1f3718c835903ac99d9bb8ca4d79c908b25a6d1209b9b1.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users\n SET deactivated_at = NULL\n WHERE user_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "98a5491eb5f10997ac1f3718c835903ac99d9bb8ca4d79c908b25a6d1209b9b1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json new file mode 100644 index 00000000..eb1a801c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json @@ -0,0 +1,164 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint,\n on_backchannel_logout\n FROM upstream_oauth_providers\n WHERE disabled_at IS NULL\n ORDER BY ui_order ASC, upstream_oauth_provider_id ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_provider_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "issuer", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "human_name", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "brand_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "client_id", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "token_endpoint_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "fetch_userinfo", + "type_info": 
"Bool" + }, + { + "ordinal": 11, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 13, + "name": "disabled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 14, + "name": "claims_imports: Json", + "type_info": "Jsonb" + }, + { + "ordinal": 15, + "name": "jwks_uri_override", + "type_info": "Text" + }, + { + "ordinal": 16, + "name": "authorization_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "token_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "userinfo_endpoint_override", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "discovery_mode", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "pkce_mode", + "type_info": "Text" + }, + { + "ordinal": 21, + "name": "response_mode", + "type_info": "Text" + }, + { + "ordinal": 22, + "name": "additional_parameters: Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 23, + "name": "forward_login_hint", + "type_info": "Bool" + }, + { + "ordinal": 24, + "name": "on_backchannel_logout", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + true, + true, + false, + false, + false, + true, + false, + true, + false, + true, + true, + true, + true, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9b7363000017fa3dee46441bc0679cb16f9f8df08fa258cc907007fb9bcd0bc7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9b7363000017fa3dee46441bc0679cb16f9f8df08fa258cc907007fb9bcd0bc7.json new file mode 100644 index 00000000..e6b0897f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9b7363000017fa3dee46441bc0679cb16f9f8df08fa258cc907007fb9bcd0bc7.json @@ -0,0 
+1,82 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_session_id\n , device_id\n , human_name\n , user_id\n , user_session_id\n , created_at\n , finished_at\n , is_synapse_admin\n , user_agent\n , last_active_at\n , last_active_ip as \"last_active_ip: IpAddr\"\n FROM compat_sessions\n WHERE compat_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "device_id", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "human_name", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 4, + "name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "finished_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "is_synapse_admin", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "user_agent", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "last_active_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "last_active_ip: IpAddr", + "type_info": "Inet" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + false, + true, + false, + true, + false, + true, + true, + true + ] + }, + "hash": "9b7363000017fa3dee46441bc0679cb16f9f8df08fa258cc907007fb9bcd0bc7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9c9c65d4ca6847761d8f999253590082672b3782875cf3f5ba0b2f9d26e3a507.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9c9c65d4ca6847761d8f999253590082672b3782875cf3f5ba0b2f9d26e3a507.json new file mode 100644 index 00000000..7ffc942e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9c9c65d4ca6847761d8f999253590082672b3782875cf3f5ba0b2f9d26e3a507.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO 
user_session_authentications\n (user_session_authentication_id, user_session_id, created_at, upstream_oauth_authorization_session_id)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "9c9c65d4ca6847761d8f999253590082672b3782875cf3f5ba0b2f9d26e3a507" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9e8152d445f9996b221ad3690ba982ad01035296bf4539ca5620a043924a7292.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9e8152d445f9996b221ad3690ba982ad01035296bf4539ca5620a043924a7292.json new file mode 100644 index 00000000..0a838d1a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9e8152d445f9996b221ad3690ba982ad01035296bf4539ca5620a043924a7292.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE personal_access_tokens\n SET revoked_at = $2\n WHERE personal_session_id = $1 AND revoked_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "9e8152d445f9996b221ad3690ba982ad01035296bf4539ca5620a043924a7292" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9eaf35f045aaca8473efc4a1f529afe24f01d9ec34609f373db5c535ccb58516.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9eaf35f045aaca8473efc4a1f529afe24f01d9ec34609f373db5c535ccb58516.json new file mode 100644 index 00000000..37173d78 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9eaf35f045aaca8473efc4a1f529afe24f01d9ec34609f373db5c535ccb58516.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO upstream_oauth_links (\n upstream_oauth_link_id,\n upstream_oauth_provider_id,\n user_id,\n subject,\n human_account_name,\n created_at\n ) VALUES ($1, $2, NULL, $3, $4, $5)\n ", + "describe": { + "columns": [], + 
"parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "9eaf35f045aaca8473efc4a1f529afe24f01d9ec34609f373db5c535ccb58516" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9f7bdc034c618e47e49c467d0d7f5b8c297d055abe248cc876dbc12c5a7dc920.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9f7bdc034c618e47e49c467d0d7f5b8c297d055abe248cc876dbc12c5a7dc920.json new file mode 100644 index 00000000..55b4058e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9f7bdc034c618e47e49c467d0d7f5b8c297d055abe248cc876dbc12c5a7dc920.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO compat_refresh_tokens\n (compat_refresh_token_id, compat_session_id,\n compat_access_token_id, refresh_token, created_at)\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "9f7bdc034c618e47e49c467d0d7f5b8c297d055abe248cc876dbc12c5a7dc920" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-9fe87eeaf4b7d0ba09b59ddad3476eb57ccb6e4053ab8f4450dd4a9d1f6ba108.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9fe87eeaf4b7d0ba09b59ddad3476eb57ccb6e4053ab8f4450dd4a9d1f6ba108.json new file mode 100644 index 00000000..03d162af --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-9fe87eeaf4b7d0ba09b59ddad3476eb57ccb6e4053ab8f4450dd4a9d1f6ba108.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT policy_data_id, created_at, data\n FROM policy_data\n ORDER BY policy_data_id DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "policy_data_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "data", + 
"type_info": "Jsonb" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "9fe87eeaf4b7d0ba09b59ddad3476eb57ccb6e4053ab8f4450dd4a9d1f6ba108" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a0be6c56e470382b9470df414497e260ba8911123744980e24a52bc9b95bd056.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a0be6c56e470382b9470df414497e260ba8911123744980e24a52bc9b95bd056.json new file mode 100644 index 00000000..3542f848 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a0be6c56e470382b9470df414497e260ba8911123744980e24a52bc9b95bd056.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO personal_access_tokens\n (personal_access_token_id, personal_session_id, access_token_sha256, created_at, expires_at)\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Bytea", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "a0be6c56e470382b9470df414497e260ba8911123744980e24a52bc9b95bd056" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a2f7433f06fb4f6a7ad5ac6c1db18705276bce41e9b19d5d7e910ad4b767fb5e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a2f7433f06fb4f6a7ad5ac6c1db18705276bce41e9b19d5d7e910ad4b767fb5e.json new file mode 100644 index 00000000..e87c3586 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a2f7433f06fb4f6a7ad5ac6c1db18705276bce41e9b19d5d7e910ad4b767fb5e.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_refresh_tokens\n (oauth2_refresh_token_id, oauth2_session_id, oauth2_access_token_id,\n refresh_token, created_at)\n VALUES\n ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": 
"a2f7433f06fb4f6a7ad5ac6c1db18705276bce41e9b19d5d7e910ad4b767fb5e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a50eb326c3522f971f6ee7e13dff61efbeb1ec24e2c694e1673347bae993762d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a50eb326c3522f971f6ee7e13dff61efbeb1ec24e2c694e1673347bae993762d.json new file mode 100644 index 00000000..f1e85750 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a50eb326c3522f971f6ee7e13dff61efbeb1ec24e2c694e1673347bae993762d.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT user_registration_id\n FROM user_registrations\n WHERE ($1::uuid IS NULL OR user_registration_id > $1)\n AND user_registration_id <= $2\n ORDER BY user_registration_id\n LIMIT $3\n )\n DELETE FROM user_registrations\n USING to_delete\n WHERE user_registrations.user_registration_id = to_delete.user_registration_id\n RETURNING user_registrations.user_registration_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_registration_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "a50eb326c3522f971f6ee7e13dff61efbeb1ec24e2c694e1673347bae993762d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a63a217981b97448ddcc96b2489ddd9d3bc8c99b5b8b1d373939fc3ae9715c27.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a63a217981b97448ddcc96b2489ddd9d3bc8c99b5b8b1d373939fc3ae9715c27.json new file mode 100644 index 00000000..407258ab --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a63a217981b97448ddcc96b2489ddd9d3bc8c99b5b8b1d373939fc3ae9715c27.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_jobs\n SET status = 'completed', completed_at = $1\n WHERE queue_job_id = $2 AND status = 'running'\n ", + "describe": { + "columns": [], + 
"parameters": { + "Left": [ + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "a63a217981b97448ddcc96b2489ddd9d3bc8c99b5b8b1d373939fc3ae9715c27" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7094d84d313602729fde155cfbe63041fca7cbab407f98452462ec45e3cfd16.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7094d84d313602729fde155cfbe63041fca7cbab407f98452462ec45e3cfd16.json new file mode 100644 index 00000000..effac88b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7094d84d313602729fde155cfbe63041fca7cbab407f98452462ec45e3cfd16.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_sso_login_id\n , login_token\n , redirect_uri\n , created_at\n , fulfilled_at\n , exchanged_at\n , compat_session_id\n , user_session_id\n\n FROM compat_sso_logins\n WHERE compat_sso_login_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_sso_login_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "login_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "redirect_uri", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "user_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "a7094d84d313602729fde155cfbe63041fca7cbab407f98452462ec45e3cfd16" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a75a6a08c9639053cfc3cffa9d4a009785f358b334f5c586c2e358f0d0b4d856.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a75a6a08c9639053cfc3cffa9d4a009785f358b334f5c586c2e358f0d0b4d856.json new file mode 100644 index 00000000..c7146b7d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a75a6a08c9639053cfc3cffa9d4a009785f358b334f5c586c2e358f0d0b4d856.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_refresh_token_id\n , refresh_token\n , created_at\n , consumed_at\n , revoked_at\n , oauth2_access_token_id\n , oauth2_session_id\n , next_oauth2_refresh_token_id\n FROM oauth2_refresh_tokens\n\n WHERE refresh_token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_refresh_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "refresh_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "consumed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "oauth2_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "next_oauth2_refresh_token_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + true, + false, + true + ] + }, + "hash": "a75a6a08c9639053cfc3cffa9d4a009785f358b334f5c586c2e358f0d0b4d856" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7f780528882a2ae66c45435215763eed0582264861436eab3f862e3eb12cab1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7f780528882a2ae66c45435215763eed0582264861436eab3f862e3eb12cab1.json new file mode 100644 index 00000000..3b74f39d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-a7f780528882a2ae66c45435215763eed0582264861436eab3f862e3eb12cab1.json 
@@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO compat_access_tokens\n (compat_access_token_id, compat_session_id, access_token, created_at, expires_at)\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "a7f780528882a2ae66c45435215763eed0582264861436eab3f862e3eb12cab1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-ab34912b42a48a8b5c8d63e271b99b7d0b690a2471873c6654b1b6cf2079b95c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ab34912b42a48a8b5c8d63e271b99b7d0b690a2471873c6654b1b6cf2079b95c.json new file mode 100644 index 00000000..85b99b33 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ab34912b42a48a8b5c8d63e271b99b7d0b690a2471873c6654b1b6cf2079b95c.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sessions cs\n SET finished_at = $2\n WHERE compat_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "ab34912b42a48a8b5c8d63e271b99b7d0b690a2471873c6654b1b6cf2079b95c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-ae6bf8958c4d9837d63f56574e91f91acc6076a8521adc3e30a83bf70e2121a0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ae6bf8958c4d9837d63f56574e91f91acc6076a8521adc3e30a83bf70e2121a0.json new file mode 100644 index 00000000..d46a6c5d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ae6bf8958c4d9837d63f56574e91f91acc6076a8521adc3e30a83bf70e2121a0.json @@ -0,0 +1,47 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_authentication_code_id\n , user_email_authentication_id\n , code\n , created_at\n , expires_at\n FROM user_email_authentication_codes\n WHERE user_email_authentication_id = $1\n 
AND code = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_authentication_code_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_email_authentication_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "code", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "expires_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "ae6bf8958c4d9837d63f56574e91f91acc6076a8521adc3e30a83bf70e2121a0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-afa86e79e3de2a83265cb0db8549d378a2f11b2a27bbd86d60558318c87eb698.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-afa86e79e3de2a83265cb0db8549d378a2f11b2a27bbd86d60558318c87eb698.json new file mode 100644 index 00000000..d6fa5341 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-afa86e79e3de2a83265cb0db8549d378a2f11b2a27bbd86d60558318c87eb698.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_access_tokens\n (oauth2_access_token_id, oauth2_session_id, access_token, created_at, expires_at)\n VALUES\n ($1, $2, $3, $4, $5)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "afa86e79e3de2a83265cb0db8549d378a2f11b2a27bbd86d60558318c87eb698" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b3568613352efae1125a88565d886157d96866f7ef9b09b03a45ba4322664bd0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b3568613352efae1125a88565d886157d96866f7ef9b09b03a45ba4322664bd0.json new file mode 100644 index 00000000..9acc3f81 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b3568613352efae1125a88565d886157d96866f7ef9b09b03a45ba4322664bd0.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registration_tokens\n SET revoked_at = $2\n WHERE user_registration_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "b3568613352efae1125a88565d886157d96866f7ef9b09b03a45ba4322664bd0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b60d34f4d250c12f75dba10491c1337d69aebad12be6fbfbdde91e34083ba4ed.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b60d34f4d250c12f75dba10491c1337d69aebad12be6fbfbdde91e34083ba4ed.json new file mode 100644 index 00000000..5b4d6fb5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b60d34f4d250c12f75dba10491c1337d69aebad12be6fbfbdde91e34083ba4ed.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_registrations\n SET hashed_password = $2, hashed_password_version = $3\n WHERE user_registration_id = $1 AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "b60d34f4d250c12f75dba10491c1337d69aebad12be6fbfbdde91e34083ba4ed" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b6c4f4a23968cba2a82c2b7cfffc05a7ed582c9e5c1f65d27b0686f843ccfe42.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b6c4f4a23968cba2a82c2b7cfffc05a7ed582c9e5c1f65d27b0686f843ccfe42.json new file mode 100644 index 00000000..304f9c96 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b6c4f4a23968cba2a82c2b7cfffc05a7ed582c9e5c1f65d27b0686f843ccfe42.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO policy_data (policy_data_id, created_at, data)\n VALUES ($1, $2, $3)\n ", + 
"describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "b6c4f4a23968cba2a82c2b7cfffc05a7ed582c9e5c1f65d27b0686f843ccfe42" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b700dc3f7d0f86f4904725d8357e34b7e457f857ed37c467c314142877fd5367.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b700dc3f7d0f86f4904725d8357e34b7e457f857ed37c467c314142877fd5367.json new file mode 100644 index 00000000..ff538757 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b700dc3f7d0f86f4904725d8357e34b7e457f857ed37c467c314142877fd5367.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_sessions\n SET finished_at = $2\n WHERE oauth2_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "b700dc3f7d0f86f4904725d8357e34b7e457f857ed37c467c314142877fd5367" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b74e4d620bed4832a4e8e713a346691f260a7eca4bf494d6fb11c7cf699adaad.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b74e4d620bed4832a4e8e713a346691f260a7eca4bf494d6fb11c7cf699adaad.json new file mode 100644 index 00000000..68f1b176 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b74e4d620bed4832a4e8e713a346691f260a7eca4bf494d6fb11c7cf699adaad.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sessions SET finished_at = $3 WHERE user_id = $1 AND device_id = $2 AND finished_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "b74e4d620bed4832a4e8e713a346691f260a7eca4bf494d6fb11c7cf699adaad" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b91cc2458e1a530e7cadbd1ca3e2eaf93e1c44108b6770a24c9a24ac29db37d3.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b91cc2458e1a530e7cadbd1ca3e2eaf93e1c44108b6770a24c9a24ac29db37d3.json new file mode 100644 index 00000000..68df599b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b91cc2458e1a530e7cadbd1ca3e2eaf93e1c44108b6770a24c9a24ac29db37d3.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_registration_id\n , ip_address as \"ip_address: IpAddr\"\n , user_agent\n , post_auth_action\n , username\n , display_name\n , terms_url\n , email_authentication_id\n , user_registration_token_id\n , hashed_password\n , hashed_password_version\n , upstream_oauth_authorization_session_id\n , created_at\n , completed_at\n FROM user_registrations\n WHERE user_registration_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_registration_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "ip_address: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 2, + "name": "user_agent", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "post_auth_action", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "display_name", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "terms_url", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "email_authentication_id", + "type_info": "Uuid" + }, + { + "ordinal": 8, + "name": "user_registration_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 9, + "name": "hashed_password", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "hashed_password_version", + "type_info": "Int4" + }, + { + "ordinal": 11, + "name": "upstream_oauth_authorization_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 12, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + 
"ordinal": 13, + "name": "completed_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + false, + true + ] + }, + "hash": "b91cc2458e1a530e7cadbd1ca3e2eaf93e1c44108b6770a24c9a24ac29db37d3" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-b992283a9b43cbb8f86149f3f55cb47fb628dabd8fadc50e6a5772903f851e1c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b992283a9b43cbb8f86149f3f55cb47fb628dabd8fadc50e6a5772903f851e1c.json new file mode 100644 index 00000000..1f7ec952 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-b992283a9b43cbb8f86149f3f55cb47fb628dabd8fadc50e6a5772903f851e1c.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM upstream_oauth_authorization_sessions\n WHERE upstream_oauth_provider_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "b992283a9b43cbb8f86149f3f55cb47fb628dabd8fadc50e6a5772903f851e1c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb0f782756c274c06c1b63af6fc3ac2a7cedfd4247b57f062d348b4b1b36bef1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb0f782756c274c06c1b63af6fc3ac2a7cedfd4247b57f062d348b4b1b36bef1.json new file mode 100644 index 00000000..f6edc0ee --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb0f782756c274c06c1b63af6fc3ac2a7cedfd4247b57f062d348b4b1b36bef1.json @@ -0,0 +1,142 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_client_id\n , metadata_digest\n , encrypted_client_secret\n , application_type\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , client_name\n , logo_uri\n , client_uri\n , policy_uri\n , tos_uri\n 
, jwks_uri\n , jwks\n , id_token_signed_response_alg\n , userinfo_signed_response_alg\n , token_endpoint_auth_method\n , token_endpoint_auth_signing_alg\n , initiate_login_uri\n FROM oauth2_clients c\n\n WHERE oauth2_client_id = ANY($1::uuid[])\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "metadata_digest", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "application_type", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "redirect_uris", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "grant_type_authorization_code", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "grant_type_refresh_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "grant_type_client_credentials", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "grant_type_device_code", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "client_name", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "logo_uri", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "client_uri", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "policy_uri", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "tos_uri", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "jwks_uri", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "jwks", + "type_info": "Jsonb" + }, + { + "ordinal": 16, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "token_endpoint_auth_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "initiate_login_uri", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + 
"UuidArray" + ] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "bb0f782756c274c06c1b63af6fc3ac2a7cedfd4247b57f062d348b4b1b36bef1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb141d28c0c82244f31d542038c314d05ceb3a7b8f35397c0faef3b36d2d14a7.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb141d28c0c82244f31d542038c314d05ceb3a7b8f35397c0faef3b36d2d14a7.json new file mode 100644 index 00000000..fc966587 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bb141d28c0c82244f31d542038c314d05ceb3a7b8f35397c0faef3b36d2d14a7.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_device_code_grant_id\n , oauth2_client_id\n , scope\n , device_code\n , user_code\n , created_at\n , expires_at\n , fulfilled_at\n , rejected_at\n , exchanged_at\n , user_session_id\n , oauth2_session_id\n , ip_address as \"ip_address: IpAddr\"\n , user_agent\n FROM\n oauth2_device_code_grant\n\n WHERE user_code = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_device_code_grant_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "scope", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "device_code", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "user_code", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "rejected_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + 
"name": "user_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 11, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 12, + "name": "ip_address: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 13, + "name": "user_agent", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "bb141d28c0c82244f31d542038c314d05ceb3a7b8f35397c0faef3b36d2d14a7" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-bbf62633c561706a762089bbab2f76a9ba3e2ed3539ef16accb601fb609c2ec9.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bbf62633c561706a762089bbab2f76a9ba3e2ed3539ef16accb601fb609c2ec9.json new file mode 100644 index 00000000..b79260d8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-bbf62633c561706a762089bbab2f76a9ba3e2ed3539ef16accb601fb609c2ec9.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_access_tokens\n SET expires_at = $2\n WHERE compat_access_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "bbf62633c561706a762089bbab2f76a9ba3e2ed3539ef16accb601fb609c2ec9" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-c09e0bb0378d9dfb15de7f2f1209fab6ea87589819128e6fc9ed5da11dfc2770.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c09e0bb0378d9dfb15de7f2f1209fab6ea87589819128e6fc9ed5da11dfc2770.json new file mode 100644 index 00000000..2ef7b86c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c09e0bb0378d9dfb15de7f2f1209fab6ea87589819128e6fc9ed5da11dfc2770.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_access_token_id\n , access_token\n , created_at\n , expires_at\n , 
revoked_at\n , oauth2_session_id\n , first_used_at\n\n FROM oauth2_access_tokens\n\n WHERE access_token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "access_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "oauth2_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "first_used_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + true + ] + }, + "hash": "c09e0bb0378d9dfb15de7f2f1209fab6ea87589819128e6fc9ed5da11dfc2770" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-c29fa41743811a6ac3a9b952b6ea75d18e914f823902587b63c9f295407144b1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c29fa41743811a6ac3a9b952b6ea75d18e914f823902587b63c9f295407144b1.json new file mode 100644 index 00000000..3ba8612b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c29fa41743811a6ac3a9b952b6ea75d18e914f823902587b63c9f295407144b1.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users\n SET locked_at = $1\n WHERE user_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "c29fa41743811a6ac3a9b952b6ea75d18e914f823902587b63c9f295407144b1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-c5e7dbb22488aca427b85b3415bd1f1a1766ff865f2e08a5daa095d2a1ccbd56.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c5e7dbb22488aca427b85b3415bd1f1a1766ff865f2e08a5daa095d2a1ccbd56.json new file mode 100644 
index 00000000..9bec3027 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c5e7dbb22488aca427b85b3415bd1f1a1766ff865f2e08a5daa095d2a1ccbd56.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_authorization_grants\n SET exchanged_at = $2\n WHERE oauth2_authorization_grant_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "c5e7dbb22488aca427b85b3415bd1f1a1766ff865f2e08a5daa095d2a1ccbd56" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-c960f4f5571ee68816c49898125979f3c78c2caca52cb4b8dc9880e669a1f23e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c960f4f5571ee68816c49898125979f3c78c2caca52cb4b8dc9880e669a1f23e.json new file mode 100644 index 00000000..20cd2c70 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c960f4f5571ee68816c49898125979f3c78c2caca52cb4b8dc9880e669a1f23e.json @@ -0,0 +1,130 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_authorization_grant_id\n , created_at\n , cancelled_at\n , fulfilled_at\n , exchanged_at\n , scope\n , state\n , redirect_uri\n , response_mode\n , nonce\n , oauth2_client_id\n , authorization_code\n , response_type_code\n , response_type_id_token\n , code_challenge\n , code_challenge_method\n , login_hint\n , locale\n , oauth2_session_id\n FROM\n oauth2_authorization_grants\n\n WHERE oauth2_authorization_grant_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_authorization_grant_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "cancelled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "scope", + 
"type_info": "Text" + }, + { + "ordinal": 6, + "name": "state", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "redirect_uri", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "response_mode", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "nonce", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 11, + "name": "authorization_code", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "response_type_code", + "type_info": "Bool" + }, + { + "ordinal": 13, + "name": "response_type_id_token", + "type_info": "Bool" + }, + { + "ordinal": 14, + "name": "code_challenge", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "code_challenge_method", + "type_info": "Text" + }, + { + "ordinal": 16, + "name": "login_hint", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "locale", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "oauth2_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + false, + true, + false, + false, + true, + false, + true, + false, + false, + true, + true, + true, + true, + true + ] + }, + "hash": "c960f4f5571ee68816c49898125979f3c78c2caca52cb4b8dc9880e669a1f23e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-c984ae0496d0bd7520ee3d6761ce6a4f61a6a2001b597e4c63ba4588ec5cf530.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c984ae0496d0bd7520ee3d6761ce6a4f61a6a2001b597e4c63ba4588ec5cf530.json new file mode 100644 index 00000000..d3b566dd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-c984ae0496d0bd7520ee3d6761ce6a4f61a6a2001b597e4c63ba4588ec5cf530.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO \"oauth2_device_code_grant\"\n ( oauth2_device_code_grant_id\n , oauth2_client_id\n , scope\n , device_code\n , user_code\n , 
created_at\n , expires_at\n , ip_address\n , user_agent\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Text", + "Text", + "Timestamptz", + "Timestamptz", + "Inet", + "Text" + ] + }, + "nullable": [] + }, + "hash": "c984ae0496d0bd7520ee3d6761ce6a4f61a6a2001b597e4c63ba4588ec5cf530" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-ca093cab5143bb3dded2eda9e82473215f4d3c549ea2c5a4f860a102cc46a667.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ca093cab5143bb3dded2eda9e82473215f4d3c549ea2c5a4f860a102cc46a667.json new file mode 100644 index 00000000..8ae291bb --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ca093cab5143bb3dded2eda9e82473215f4d3c549ea2c5a4f860a102cc46a667.json @@ -0,0 +1,41 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_id\n , user_id\n , email\n , created_at\n FROM user_emails\n\n WHERE user_id = $1 AND LOWER(email) = LOWER($2)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "ca093cab5143bb3dded2eda9e82473215f4d3c549ea2c5a4f860a102cc46a667" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-cc60ad934d347fb4546205d1fe07e9d2f127cb15b1bb650d1ea3805a4c55b196.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cc60ad934d347fb4546205d1fe07e9d2f127cb15b1bb650d1ea3805a4c55b196.json new file mode 100644 index 00000000..00e04bff --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cc60ad934d347fb4546205d1fe07e9d2f127cb15b1bb650d1ea3805a4c55b196.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM upstream_oauth_links\n WHERE upstream_oauth_link_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "cc60ad934d347fb4546205d1fe07e9d2f127cb15b1bb650d1ea3805a4c55b196" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-ce36eb8d3e4478a4e8520919ff41f1a5e6470cef581b1638f5578546dd28c4df.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ce36eb8d3e4478a4e8520919ff41f1a5e6470cef581b1638f5578546dd28c4df.json new file mode 100644 index 00000000..1a425c86 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ce36eb8d3e4478a4e8520919ff41f1a5e6470cef581b1638f5578546dd28c4df.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_sso_login_id\n , login_token\n , redirect_uri\n , created_at\n , fulfilled_at\n , exchanged_at\n , compat_session_id\n , user_session_id\n\n FROM compat_sso_logins\n WHERE login_token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_sso_login_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "login_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "redirect_uri", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "fulfilled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "exchanged_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "user_session_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + true, + true, + true + ] + }, + "hash": 
"ce36eb8d3e4478a4e8520919ff41f1a5e6470cef581b1638f5578546dd28c4df" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf2eeca6d8dbc2cc72160a26e81f6e963096edb610183ba13cbbbd3d95c4134b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf2eeca6d8dbc2cc72160a26e81f6e963096edb610183ba13cbbbd3d95c4134b.json new file mode 100644 index 00000000..84f43c9d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf2eeca6d8dbc2cc72160a26e81f6e963096edb610183ba13cbbbd3d95c4134b.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT oauth2_access_token_id\n FROM oauth2_access_tokens\n WHERE revoked_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR revoked_at >= $1::timestamptz)\n AND revoked_at < $2::timestamptz\n ORDER BY revoked_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n\n deleted AS (\n DELETE FROM oauth2_access_tokens\n USING to_delete\n WHERE oauth2_access_tokens.oauth2_access_token_id = to_delete.oauth2_access_token_id\n RETURNING oauth2_access_tokens.revoked_at\n )\n\n SELECT\n COUNT(*) as \"count!\",\n MAX(revoked_at) as last_revoked_at\n FROM deleted\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "cf2eeca6d8dbc2cc72160a26e81f6e963096edb610183ba13cbbbd3d95c4134b" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf654533cfed946e9ac52dbcea1f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf654533cfed946e9ac52dbcea1f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0.json new file mode 100644 index 00000000..78ca4d0a --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-cf654533cfed946e9ac52dbcea1f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0.json @@ -0,0 +1,142 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_client_id\n , metadata_digest\n , encrypted_client_secret\n , application_type\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , client_name\n , logo_uri\n , client_uri\n , policy_uri\n , tos_uri\n , jwks_uri\n , jwks\n , id_token_signed_response_alg\n , userinfo_signed_response_alg\n , token_endpoint_auth_method\n , token_endpoint_auth_signing_alg\n , initiate_login_uri\n FROM oauth2_clients\n WHERE metadata_digest = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "metadata_digest", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "application_type", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "redirect_uris", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "grant_type_authorization_code", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "grant_type_refresh_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "grant_type_client_credentials", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "grant_type_device_code", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "client_name", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "logo_uri", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "client_uri", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "policy_uri", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "tos_uri", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "jwks_uri", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "jwks", + "type_info": "Jsonb" + }, 
+ { + "ordinal": 16, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "token_endpoint_auth_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "initiate_login_uri", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "cf654533cfed946e9ac52dbcea1f50be3dfdac0fbfb1e8a0204c0c9c103ba5b0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d02248136aa6b27636814dee4e0bc38395ab6c6fdf979616fa16fc490897cee3.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d02248136aa6b27636814dee4e0bc38395ab6c6fdf979616fa16fc490897cee3.json new file mode 100644 index 00000000..99df8e13 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d02248136aa6b27636814dee4e0bc38395ab6c6fdf979616fa16fc490897cee3.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT personal_access_token_id\n , personal_session_id\n , created_at\n , expires_at\n , revoked_at\n\n FROM personal_access_tokens\n\n WHERE personal_session_id = $1\n AND revoked_at IS NULL\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "personal_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "personal_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ 
+ false, + false, + false, + true, + true + ] + }, + "hash": "d02248136aa6b27636814dee4e0bc38395ab6c6fdf979616fa16fc490897cee3" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d0355d4e98bec6120f17d8cf81ac8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d0355d4e98bec6120f17d8cf81ac8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba.json new file mode 100644 index 00000000..2e20ac5c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d0355d4e98bec6120f17d8cf81ac8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_registration_token_id,\n token,\n usage_limit,\n times_used,\n created_at,\n last_used_at,\n expires_at,\n revoked_at\n FROM user_registration_tokens\n WHERE user_registration_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_registration_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "usage_limit", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "times_used", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "last_used_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "d0355d4e98bec6120f17d8cf81ac8c30ed19e9cebd0c8e7c7918b1c3ca0e3cba" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d26e42d9fd2b2ee3cf9702c1666d83e7cffa26b320ae1442c7f3e22376c4a4ee.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d26e42d9fd2b2ee3cf9702c1666d83e7cffa26b320ae1442c7f3e22376c4a4ee.json new file mode 100644 index 00000000..c607483c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d26e42d9fd2b2ee3cf9702c1666d83e7cffa26b320ae1442c7f3e22376c4a4ee.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_device_code_grant\n SET fulfilled_at = $1\n , user_session_id = $2\n WHERE oauth2_device_code_grant_id = $3\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "d26e42d9fd2b2ee3cf9702c1666d83e7cffa26b320ae1442c7f3e22376c4a4ee" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d4bc51c30f1119ea9d117fb565ec554d63c8773040679a77e99ac3fa24cec71d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d4bc51c30f1119ea9d117fb565ec554d63c8773040679a77e99ac3fa24cec71d.json new file mode 100644 index 00000000..7403c7cb --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d4bc51c30f1119ea9d117fb565ec554d63c8773040679a77e99ac3fa24cec71d.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH to_delete AS (\n SELECT oauth2_authorization_grant_id\n FROM oauth2_authorization_grants\n WHERE ($1::uuid IS NULL OR oauth2_authorization_grant_id > $1)\n AND oauth2_authorization_grant_id <= $2\n ORDER BY oauth2_authorization_grant_id\n LIMIT $3\n )\n DELETE FROM oauth2_authorization_grants\n USING to_delete\n WHERE oauth2_authorization_grants.oauth2_authorization_grant_id = to_delete.oauth2_authorization_grant_id\n RETURNING oauth2_authorization_grants.oauth2_authorization_grant_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_authorization_grant_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": 
"d4bc51c30f1119ea9d117fb565ec554d63c8773040679a77e99ac3fa24cec71d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d7a0e4fa2f168976505405c7e7800847f3379f7b57c0972659a35bfb68b0f6cd.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d7a0e4fa2f168976505405c7e7800847f3379f7b57c0972659a35bfb68b0f6cd.json new file mode 100644 index 00000000..c9cebfad --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d7a0e4fa2f168976505405c7e7800847f3379f7b57c0972659a35bfb68b0f6cd.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_recovery_tickets (\n user_recovery_ticket_id\n , user_recovery_session_id\n , user_email_id\n , ticket\n , created_at\n , expires_at\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Uuid", + "Text", + "Timestamptz", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "d7a0e4fa2f168976505405c7e7800847f3379f7b57c0972659a35bfb68b0f6cd" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d8f0b02952e786dd4309eac9de04a359aea3a46e5d4e07764cec56ce5d6609c0.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d8f0b02952e786dd4309eac9de04a359aea3a46e5d4e07764cec56ce5d6609c0.json new file mode 100644 index 00000000..275273b9 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d8f0b02952e786dd4309eac9de04a359aea3a46e5d4e07764cec56ce5d6609c0.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT oauth2_session_id, finished_at\n FROM oauth2_sessions\n WHERE finished_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR finished_at >= $1)\n AND finished_at < $2\n ORDER BY finished_at ASC\n LIMIT $3\n FOR UPDATE\n ),\n deleted_refresh_tokens AS (\n DELETE FROM oauth2_refresh_tokens USING to_delete\n WHERE oauth2_refresh_tokens.oauth2_session_id = to_delete.oauth2_session_id\n ),\n 
deleted_access_tokens AS (\n DELETE FROM oauth2_access_tokens USING to_delete\n WHERE oauth2_access_tokens.oauth2_session_id = to_delete.oauth2_session_id\n ),\n deleted_sessions AS (\n DELETE FROM oauth2_sessions USING to_delete\n WHERE oauth2_sessions.oauth2_session_id = to_delete.oauth2_session_id\n RETURNING oauth2_sessions.finished_at\n )\n SELECT COUNT(*) as \"count!\", MAX(finished_at) as last_finished_at FROM deleted_sessions\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_finished_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "d8f0b02952e786dd4309eac9de04a359aea3a46e5d4e07764cec56ce5d6609c0" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-d95cd1b4bcfa1d7bb236d49e1956fcc9a684609956972fe4f95aac13f30b2530.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d95cd1b4bcfa1d7bb236d49e1956fcc9a684609956972fe4f95aac13f30b2530.json new file mode 100644 index 00000000..11953021 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-d95cd1b4bcfa1d7bb236d49e1956fcc9a684609956972fe4f95aac13f30b2530.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT user_session_id, finished_at\n FROM user_sessions us\n WHERE us.finished_at IS NOT NULL\n AND ($1::timestamptz IS NULL OR us.finished_at >= $1)\n AND us.finished_at < $2\n -- Only delete if no oauth2_sessions reference this user_session\n AND NOT EXISTS (\n SELECT 1 FROM oauth2_sessions os\n WHERE os.user_session_id = us.user_session_id\n )\n -- Only delete if no compat_sessions reference this user_session\n AND NOT EXISTS (\n SELECT 1 FROM compat_sessions cs\n WHERE cs.user_session_id = us.user_session_id\n )\n ORDER BY us.finished_at ASC\n LIMIT $3\n FOR UPDATE OF us\n ),\n 
deleted_authentications AS (\n DELETE FROM user_session_authentications USING to_delete\n WHERE user_session_authentications.user_session_id = to_delete.user_session_id\n ),\n deleted_sessions AS (\n DELETE FROM user_sessions USING to_delete\n WHERE user_sessions.user_session_id = to_delete.user_session_id\n RETURNING user_sessions.finished_at\n )\n SELECT COUNT(*) as \"count!\", MAX(finished_at) as last_finished_at FROM deleted_sessions\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_finished_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null + ] + }, + "hash": "d95cd1b4bcfa1d7bb236d49e1956fcc9a684609956972fe4f95aac13f30b2530" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-da02f93d7346992a9795f12b900f91ac0b326dd751c0d374d6ef4d19f671d22e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-da02f93d7346992a9795f12b900f91ac0b326dd751c0d374d6ef4d19f671d22e.json new file mode 100644 index 00000000..378ca2d7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-da02f93d7346992a9795f12b900f91ac0b326dd751c0d374d6ef4d19f671d22e.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO oauth2_clients\n ( oauth2_client_id\n , encrypted_client_secret\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , token_endpoint_auth_method\n , jwks\n , client_name\n , jwks_uri\n , is_static\n )\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, TRUE)\n ON CONFLICT (oauth2_client_id)\n DO\n UPDATE SET encrypted_client_secret = EXCLUDED.encrypted_client_secret\n , redirect_uris = EXCLUDED.redirect_uris\n , grant_type_authorization_code = EXCLUDED.grant_type_authorization_code\n , grant_type_refresh_token = 
EXCLUDED.grant_type_refresh_token\n , grant_type_client_credentials = EXCLUDED.grant_type_client_credentials\n , grant_type_device_code = EXCLUDED.grant_type_device_code\n , token_endpoint_auth_method = EXCLUDED.token_endpoint_auth_method\n , jwks = EXCLUDED.jwks\n , client_name = EXCLUDED.client_name\n , jwks_uri = EXCLUDED.jwks_uri\n , is_static = TRUE\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "TextArray", + "Bool", + "Bool", + "Bool", + "Bool", + "Text", + "Jsonb", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "da02f93d7346992a9795f12b900f91ac0b326dd751c0d374d6ef4d19f671d22e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-dbf4be84eeff9ea51b00185faae2d453ab449017ed492bf6711dc7fceb630880.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dbf4be84eeff9ea51b00185faae2d453ab449017ed492bf6711dc7fceb630880.json new file mode 100644 index 00000000..943c31fb --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dbf4be84eeff9ea51b00185faae2d453ab449017ed492bf6711dc7fceb630880.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_sessions\n SET finished_at = $1\n WHERE user_session_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "dbf4be84eeff9ea51b00185faae2d453ab449017ed492bf6711dc7fceb630880" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-dca9b361c4409b14498b85f192b0034201575a49e0240ac6715b55ad8d381d0e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dca9b361c4409b14498b85f192b0034201575a49e0240ac6715b55ad8d381d0e.json new file mode 100644 index 00000000..39447cd1 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dca9b361c4409b14498b85f192b0034201575a49e0240ac6715b55ad8d381d0e.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n 
DELETE FROM personal_sessions\n WHERE owner_oauth2_client_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "dca9b361c4409b14498b85f192b0034201575a49e0240ac6715b55ad8d381d0e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-dd02cc4a48123c28b34da8501060096c33df9e30611ef89d01bf0502119cbbe1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dd02cc4a48123c28b34da8501060096c33df9e30611ef89d01bf0502119cbbe1.json new file mode 100644 index 00000000..01b78325 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dd02cc4a48123c28b34da8501060096c33df9e30611ef89d01bf0502119cbbe1.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE user_email_authentications\n SET completed_at = $2\n WHERE user_email_authentication_id = $1\n AND completed_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "dd02cc4a48123c28b34da8501060096c33df9e30611ef89d01bf0502119cbbe1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-dda97742d389ffeeaab33d352d05767e2150f7da3cf384a7f44741c769f44144.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dda97742d389ffeeaab33d352d05767e2150f7da3cf384a7f44741c769f44144.json new file mode 100644 index 00000000..c36ba7d2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-dda97742d389ffeeaab33d352d05767e2150f7da3cf384a7f44741c769f44144.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_email_id\n , user_id\n , email\n , created_at\n FROM user_emails\n\n WHERE user_id = $1\n\n ORDER BY email ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "email", + 
"type_info": "Text" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "dda97742d389ffeeaab33d352d05767e2150f7da3cf384a7f44741c769f44144" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e02ea83d195cb58fa8525e66a6ac1dddae3f1dfb1ef48494f6aee3fd03abe6f6.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e02ea83d195cb58fa8525e66a6ac1dddae3f1dfb1ef48494f6aee3fd03abe6f6.json new file mode 100644 index 00000000..3fe70072 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e02ea83d195cb58fa8525e66a6ac1dddae3f1dfb1ef48494f6aee3fd03abe6f6.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n to_delete AS (\n SELECT user_email_authentication_id\n FROM user_email_authentications\n WHERE ($1::uuid IS NULL OR user_email_authentication_id > $1)\n AND user_email_authentication_id <= $2\n ORDER BY user_email_authentication_id\n LIMIT $3\n ),\n deleted_codes AS (\n DELETE FROM user_email_authentication_codes\n USING to_delete\n WHERE user_email_authentication_codes.user_email_authentication_id = to_delete.user_email_authentication_id\n RETURNING user_email_authentication_codes.user_email_authentication_code_id\n )\n DELETE FROM user_email_authentications\n USING to_delete\n WHERE user_email_authentications.user_email_authentication_id = to_delete.user_email_authentication_id\n RETURNING user_email_authentications.user_email_authentication_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_email_authentication_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "e02ea83d195cb58fa8525e66a6ac1dddae3f1dfb1ef48494f6aee3fd03abe6f6" +} diff --git 
a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e1746b33c2f0d10f26332195f78e1ef2f192ca66f8000d1385626154e5ce4f7e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e1746b33c2f0d10f26332195f78e1ef2f192ca66f8000d1385626154e5ce4f7e.json new file mode 100644 index 00000000..2112e760 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e1746b33c2f0d10f26332195f78e1ef2f192ca66f8000d1385626154e5ce4f7e.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT personal_access_token_id\n , personal_session_id\n , created_at\n , expires_at\n , revoked_at\n\n FROM personal_access_tokens\n\n WHERE personal_access_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "personal_access_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "personal_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + true + ] + }, + "hash": "e1746b33c2f0d10f26332195f78e1ef2f192ca66f8000d1385626154e5ce4f7e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e291be0434ab9c346dee777e50f8e601f12c8003fe93a5ecb110d02642d14c3c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e291be0434ab9c346dee777e50f8e601f12c8003fe93a5ecb110d02642d14c3c.json new file mode 100644 index 00000000..84ac12de --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e291be0434ab9c346dee777e50f8e601f12c8003fe93a5ecb110d02642d14c3c.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_jobs\n (queue_job_id, queue_name, payload, metadata, created_at)\n VALUES ($1, $2, $3, $4, $5)\n ", + "describe": { + 
"columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Jsonb", + "Jsonb", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "e291be0434ab9c346dee777e50f8e601f12c8003fe93a5ecb110d02642d14c3c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e35d56de7136d43d0803ec825b0612e4185cef838f105d66f18cb24865e45140.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e35d56de7136d43d0803ec825b0612e4185cef838f105d66f18cb24865e45140.json new file mode 100644 index 00000000..89922c4a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e35d56de7136d43d0803ec825b0612e4185cef838f105d66f18cb24865e45140.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT compat_refresh_token_id\n , refresh_token\n , created_at\n , consumed_at\n , compat_session_id\n , compat_access_token_id\n\n FROM compat_refresh_tokens\n\n WHERE compat_refresh_token_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "compat_refresh_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "refresh_token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "consumed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "compat_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 5, + "name": "compat_access_token_id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false + ] + }, + "hash": "e35d56de7136d43d0803ec825b0612e4185cef838f105d66f18cb24865e45140" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json new file mode 100644 index 00000000..c3c2e250 --- 
/dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json @@ -0,0 +1,100 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n upstream_oauth_authorization_session_id,\n upstream_oauth_provider_id,\n upstream_oauth_link_id,\n state,\n code_challenge_verifier,\n nonce,\n id_token,\n id_token_claims,\n extra_callback_parameters,\n userinfo,\n created_at,\n completed_at,\n consumed_at,\n unlinked_at\n FROM upstream_oauth_authorization_sessions\n WHERE upstream_oauth_authorization_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "upstream_oauth_authorization_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "upstream_oauth_provider_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "upstream_oauth_link_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "state", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "code_challenge_verifier", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "nonce", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "id_token", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "id_token_claims", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "extra_callback_parameters", + "type_info": "Jsonb" + }, + { + "ordinal": 9, + "name": "userinfo", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 11, + "name": "completed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 12, + "name": "consumed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 13, + "name": "unlinked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + false, + true, + true, + true, + true, + true, + true, + false, + true, + true, + true + ] + }, + "hash": 
"e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e68a7084d44462d19f30902d7e6c1bd60bb771c6f075df15ab0137a7ffc896da.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e68a7084d44462d19f30902d7e6c1bd60bb771c6f075df15ab0137a7ffc896da.json new file mode 100644 index 00000000..aa173785 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e68a7084d44462d19f30902d7e6c1bd60bb771c6f075df15ab0137a7ffc896da.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT pg_advisory_xact_lock($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "pg_advisory_xact_lock", + "type_info": "Void" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "e68a7084d44462d19f30902d7e6c1bd60bb771c6f075df15ab0137a7ffc896da" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e8e48db74ac1ab5baa1e4b121643cfa33a0bf3328df6e869464fe7f31429b81e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e8e48db74ac1ab5baa1e4b121643cfa33a0bf3328df6e869464fe7f31429b81e.json new file mode 100644 index 00000000..6f461c85 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e8e48db74ac1ab5baa1e4b121643cfa33a0bf3328df6e869464fe7f31429b81e.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sso_logins\n SET\n exchanged_at = $2,\n compat_session_id = $3\n WHERE\n compat_sso_login_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "e8e48db74ac1ab5baa1e4b121643cfa33a0bf3328df6e869464fe7f31429b81e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-e99ab37ab3e03ad9c48792772b09bac77b09f67e623d5371ab4dadbe2d41fa1c.json 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e99ab37ab3e03ad9c48792772b09bac77b09f67e623d5371ab4dadbe2d41fa1c.json new file mode 100644 index 00000000..04ad6dd3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-e99ab37ab3e03ad9c48792772b09bac77b09f67e623d5371ab4dadbe2d41fa1c.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO compat_sessions\n (compat_session_id, user_id, device_id,\n user_session_id, created_at, is_synapse_admin,\n human_name)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Uuid", + "Timestamptz", + "Bool", + "Text" + ] + }, + "nullable": [] + }, + "hash": "e99ab37ab3e03ad9c48792772b09bac77b09f67e623d5371ab4dadbe2d41fa1c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-eb095f64bec5ac885683a8c6708320760971317c4519fae7af9d44e2be50985d.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-eb095f64bec5ac885683a8c6708320760971317c4519fae7af9d44e2be50985d.json new file mode 100644 index 00000000..2ebaa447 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-eb095f64bec5ac885683a8c6708320760971317c4519fae7af9d44e2be50985d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE compat_sessions\n SET human_name = $2\n WHERE compat_session_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Text" + ] + }, + "nullable": [] + }, + "hash": "eb095f64bec5ac885683a8c6708320760971317c4519fae7af9d44e2be50985d" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f0b4af5a9d6f1cc707a935fd5f34526a54ebbed8eef8f885f3a6723bc8490908.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f0b4af5a9d6f1cc707a935fd5f34526a54ebbed8eef8f885f3a6723bc8490908.json new file mode 100644 index 00000000..1622b406 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f0b4af5a9d6f1cc707a935fd5f34526a54ebbed8eef8f885f3a6723bc8490908.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM user_unsupported_third_party_ids\n WHERE user_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "f0b4af5a9d6f1cc707a935fd5f34526a54ebbed8eef8f885f3a6723bc8490908" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f41f76c94cd68fca2285b1cc60f426603c84df4ef1c6ce5dc441a63d2dc46f6e.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f41f76c94cd68fca2285b1cc60f426603c84df4ef1c6ce5dc441a63d2dc46f6e.json new file mode 100644 index 00000000..e112b131 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f41f76c94cd68fca2285b1cc60f426603c84df4ef1c6ce5dc441a63d2dc46f6e.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO user_sessions (user_session_id, user_id, created_at, user_agent)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Timestamptz", + "Text" + ] + }, + "nullable": [] + }, + "hash": "f41f76c94cd68fca2285b1cc60f426603c84df4ef1c6ce5dc441a63d2dc46f6e" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f46e87bbb149b35e1d13b2b3cd2bdeab3c28a56a395f52f001a7bb013a5dfece.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f46e87bbb149b35e1d13b2b3cd2bdeab3c28a56a395f52f001a7bb013a5dfece.json new file mode 100644 index 00000000..8d036abc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f46e87bbb149b35e1d13b2b3cd2bdeab3c28a56a395f52f001a7bb013a5dfece.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n user_recovery_session_id\n , email\n , user_agent\n , ip_address as \"ip_address: IpAddr\"\n , locale\n , created_at\n , consumed_at\n FROM 
user_recovery_sessions\n WHERE user_recovery_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_recovery_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "user_agent", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "ip_address: IpAddr", + "type_info": "Inet" + }, + { + "ordinal": 4, + "name": "locale", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "consumed_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + true, + false, + false, + true + ] + }, + "hash": "f46e87bbb149b35e1d13b2b3cd2bdeab3c28a56a395f52f001a7bb013a5dfece" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f50b7fb5a2c09e7b7e89e2addb0ca42c790c101a3fc9442862b5885d5116325a.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f50b7fb5a2c09e7b7e89e2addb0ca42c790c101a3fc9442862b5885d5116325a.json new file mode 100644 index 00000000..df75b11b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f50b7fb5a2c09e7b7e89e2addb0ca42c790c101a3fc9442862b5885d5116325a.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE queue_jobs\n SET\n status = 'failed',\n failed_at = $1,\n failed_reason = $2\n WHERE\n queue_job_id = $3\n AND status = 'running'\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Text", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "f50b7fb5a2c09e7b7e89e2addb0ca42c790c101a3fc9442862b5885d5116325a" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f5c2ec9b7038d7ed36091e670f9bf34f8aa9ea8ed50929731845e32dc3176e39.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f5c2ec9b7038d7ed36091e670f9bf34f8aa9ea8ed50929731845e32dc3176e39.json 
new file mode 100644 index 00000000..71096b9a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f5c2ec9b7038d7ed36091e670f9bf34f8aa9ea8ed50929731845e32dc3176e39.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO upstream_oauth_authorization_sessions (\n upstream_oauth_authorization_session_id,\n upstream_oauth_provider_id,\n state,\n code_challenge_verifier,\n nonce,\n created_at,\n completed_at,\n consumed_at,\n id_token,\n userinfo\n ) VALUES ($1, $2, $3, $4, $5, $6, NULL, NULL, NULL, NULL)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Text", + "Text", + "Timestamptz" + ] + }, + "nullable": [] + }, + "hash": "f5c2ec9b7038d7ed36091e670f9bf34f8aa9ea8ed50929731845e32dc3176e39" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-f8182fd162ffb018d4f102fa7ddbc9991135065e81af8f77b5beef9405607577.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f8182fd162ffb018d4f102fa7ddbc9991135065e81af8f77b5beef9405607577.json new file mode 100644 index 00000000..1a715f57 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-f8182fd162ffb018d4f102fa7ddbc9991135065e81af8f77b5beef9405607577.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO queue_schedules (schedule_name)\n SELECT * FROM UNNEST($1::text[]) AS t (schedule_name)\n ON CONFLICT (schedule_name) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "f8182fd162ffb018d4f102fa7ddbc9991135065e81af8f77b5beef9405607577" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fbf926f630df5d588df4f1c9c0dc0f594332be5829d5d7c6b66183ac25b3d166.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fbf926f630df5d588df4f1c9c0dc0f594332be5829d5d7c6b66183ac25b3d166.json new file mode 100644 index 00000000..d41b1dcc --- 
/dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fbf926f630df5d588df4f1c9c0dc0f594332be5829d5d7c6b66183ac25b3d166.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT EXISTS (\n SELECT 1\n FROM information_schema.tables\n WHERE table_name = '_sqlx_migrations'\n ) AS \"exists!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "fbf926f630df5d588df4f1c9c0dc0f594332be5829d5d7c6b66183ac25b3d166" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fc9925e19000d79c0bb020ea44e13cbb364b3505626d34550e38f6f7397b9d42.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fc9925e19000d79c0bb020ea44e13cbb364b3505626d34550e38f6f7397b9d42.json new file mode 100644 index 00000000..bb080ade --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fc9925e19000d79c0bb020ea44e13cbb364b3505626d34550e38f6f7397b9d42.json @@ -0,0 +1,140 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT oauth2_client_id\n , metadata_digest\n , encrypted_client_secret\n , application_type\n , redirect_uris\n , grant_type_authorization_code\n , grant_type_refresh_token\n , grant_type_client_credentials\n , grant_type_device_code\n , client_name\n , logo_uri\n , client_uri\n , policy_uri\n , tos_uri\n , jwks_uri\n , jwks\n , id_token_signed_response_alg\n , userinfo_signed_response_alg\n , token_endpoint_auth_method\n , token_endpoint_auth_signing_alg\n , initiate_login_uri\n FROM oauth2_clients c\n WHERE is_static = TRUE\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "metadata_digest", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "encrypted_client_secret", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "application_type", + "type_info": "Text" + }, 
+ { + "ordinal": 4, + "name": "redirect_uris", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "grant_type_authorization_code", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "grant_type_refresh_token", + "type_info": "Bool" + }, + { + "ordinal": 7, + "name": "grant_type_client_credentials", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "grant_type_device_code", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "client_name", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "logo_uri", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "client_uri", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "policy_uri", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "tos_uri", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "jwks_uri", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "jwks", + "type_info": "Jsonb" + }, + { + "ordinal": 16, + "name": "id_token_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 17, + "name": "userinfo_signed_response_alg", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "token_endpoint_auth_method", + "type_info": "Text" + }, + { + "ordinal": 19, + "name": "token_endpoint_auth_signing_alg", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "initiate_login_uri", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + true, + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ] + }, + "hash": "fc9925e19000d79c0bb020ea44e13cbb364b3505626d34550e38f6f7397b9d42" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fca331753aeccddbad96d06fc9d066dcefebe978a7af477bb6b55faa1d31e9b1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fca331753aeccddbad96d06fc9d066dcefebe978a7af477bb6b55faa1d31e9b1.json new file mode 100644 index 00000000..c5e2c695 
--- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fca331753aeccddbad96d06fc9d066dcefebe978a7af477bb6b55faa1d31e9b1.json @@ -0,0 +1,64 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT user_registration_token_id,\n token,\n usage_limit,\n times_used,\n created_at,\n last_used_at,\n expires_at,\n revoked_at\n FROM user_registration_tokens\n WHERE token = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_registration_token_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "token", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "usage_limit", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "times_used", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "last_used_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "revoked_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + true, + true, + true + ] + }, + "hash": "fca331753aeccddbad96d06fc9d066dcefebe978a7af477bb6b55faa1d31e9b1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json new file mode 100644 index 00000000..f5503fa0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n queue_schedules.schedule_name as \"schedule_name!\",\n queue_schedules.last_scheduled_at,\n queue_jobs.status IN ('completed', 'failed') as last_scheduled_job_completed\n FROM queue_schedules\n 
LEFT JOIN queue_jobs\n ON queue_jobs.queue_job_id = queue_schedules.last_scheduled_job_id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "schedule_name!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "last_scheduled_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "last_scheduled_job_completed", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + true, + null + ] + }, + "hash": "fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd32368fa6cd16a9704cdea54f7729681d450669563dd1178c492ffce51e5ff2.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd32368fa6cd16a9704cdea54f7729681d450669563dd1178c492ffce51e5ff2.json new file mode 100644 index 00000000..b46904cc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd32368fa6cd16a9704cdea54f7729681d450669563dd1178c492ffce51e5ff2.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT personal_session_id\n , owner_user_id\n , owner_oauth2_client_id\n , actor_user_id\n , scope_list\n , created_at\n , revoked_at\n , human_name\n , last_active_at\n , last_active_ip as \"last_active_ip: IpAddr\"\n FROM personal_sessions\n\n WHERE personal_session_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "personal_session_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "owner_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "owner_oauth2_client_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "actor_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 4, + "name": "scope_list", + "type_info": "TextArray" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "revoked_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "human_name", + "type_info": "Text" 
+ }, + { + "ordinal": 8, + "name": "last_active_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "last_active_ip: IpAddr", + "type_info": "Inet" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + false, + false, + false, + true, + false, + true, + true + ] + }, + "hash": "fd32368fa6cd16a9704cdea54f7729681d450669563dd1178c492ffce51e5ff2" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json new file mode 100644 index 00000000..072e6f57 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_authorization_sessions\n SET upstream_oauth_link_id = $1\n , completed_at = $2\n , id_token = $3\n , id_token_claims = $4\n , extra_callback_parameters = $5\n , userinfo = $6\n WHERE upstream_oauth_authorization_session_id = $7\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Text", + "Jsonb", + "Jsonb", + "Jsonb", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-fe7bd146523e4bb321cb234d6bf9f3005b55c654897a8e46dc933c7fd2263c7c.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fe7bd146523e4bb321cb234d6bf9f3005b55c654897a8e46dc933c7fd2263c7c.json new file mode 100644 index 00000000..9cfabe8e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-fe7bd146523e4bb321cb234d6bf9f3005b55c654897a8e46dc933c7fd2263c7c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM 
queue_leader\n WHERE expires_at < NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "fe7bd146523e4bb321cb234d6bf9f3005b55c654897a8e46dc933c7fd2263c7c" +} diff --git a/matrix-authentication-service/crates/storage-pg/.sqlx/query-ffbfef8b7e72ec4bae02b6bbe862980b5fe575ae8432a000e9c4e4307caa2d9b.json b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ffbfef8b7e72ec4bae02b6bbe862980b5fe575ae8432a000e9c4e4307caa2d9b.json new file mode 100644 index 00000000..77f80712 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/.sqlx/query-ffbfef8b7e72ec4bae02b6bbe862980b5fe575ae8432a000e9c4e4307caa2d9b.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE oauth2_refresh_tokens\n SET consumed_at = $2,\n next_oauth2_refresh_token_id = $3\n WHERE oauth2_refresh_token_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "ffbfef8b7e72ec4bae02b6bbe862980b5fe575ae8432a000e9c4e4307caa2d9b" +} diff --git a/matrix-authentication-service/crates/storage-pg/Cargo.toml b/matrix-authentication-service/crates/storage-pg/Cargo.toml new file mode 100644 index 00000000..c058c3c1 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/Cargo.toml @@ -0,0 +1,44 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-storage-pg" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +async-trait.workspace = true +chrono.workspace = true +crc.workspace = true +futures-util.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +rand_chacha.workspace = true +rand.workspace = true +sea-query-binder.workspace = true +sea-query.workspace = true +serde_json.workspace = true +sha2.workspace = true +sqlx.workspace = true +thiserror.workspace = true +tracing.workspace = true +tokio.workspace = true +ulid.workspace = true +url.workspace = true +uuid.workspace = true + +oauth2-types.workspace = true +mas-storage.workspace = true +mas-data-model.workspace = true +mas-iana.workspace = true +mas-jose.workspace = true diff --git a/matrix-authentication-service/crates/storage-pg/build.rs b/matrix-authentication-service/crates/storage-pg/build.rs new file mode 100644 index 00000000..007c4622 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/build.rs @@ -0,0 +1,10 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +fn main() { + // trigger recompilation when a new migration is added + println!("cargo:rerun-if-changed=migrations"); +} diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20220530084123_jobs_workers.sql b/matrix-authentication-service/crates/storage-pg/migrations/20220530084123_jobs_workers.sql new file mode 100644 index 00000000..133a89b4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20220530084123_jobs_workers.sql @@ -0,0 +1,83 @@ + CREATE SCHEMA apalis; + + CREATE TABLE IF NOT EXISTS apalis.workers ( + id TEXT NOT NULL, + worker_type TEXT NOT NULL, + storage_name TEXT NOT NULL, + layers TEXT NOT NULL DEFAULT '', + last_seen timestamptz not null default now() + ); + + CREATE INDEX IF NOT EXISTS Idx ON apalis.workers(id); + + CREATE UNIQUE INDEX IF NOT EXISTS unique_worker_id ON apalis.workers (id); + + CREATE INDEX IF NOT EXISTS WTIdx ON apalis.workers(worker_type); + + CREATE INDEX IF NOT EXISTS LSIdx ON apalis.workers(last_seen); + + CREATE TABLE IF NOT EXISTS apalis.jobs ( + job JSONB NOT NULL, + id TEXT NOT NULL, + job_type TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'Pending', + attempts INTEGER NOT NULL DEFAULT 0, + max_attempts INTEGER NOT NULL DEFAULT 25, + run_at timestamptz NOT NULL default now(), + last_error TEXT, + lock_at timestamptz, + lock_by TEXT, + done_at timestamptz, + CONSTRAINT fk_worker_lock_by FOREIGN KEY(lock_by) REFERENCES apalis.workers(id) + ); + + CREATE INDEX IF NOT EXISTS TIdx ON apalis.jobs(id); + + CREATE INDEX IF NOT EXISTS SIdx ON apalis.jobs(status); + + CREATE UNIQUE INDEX IF NOT EXISTS unique_job_id ON apalis.jobs (id); + + CREATE INDEX IF NOT EXISTS LIdx ON apalis.jobs(lock_by); + + CREATE INDEX IF NOT EXISTS JTIdx ON apalis.jobs(job_type); + + CREATE OR replace FUNCTION apalis.get_job( + worker_id TEXT, + v_job_type TEXT + ) returns apalis.jobs AS $$ + DECLARE + v_job_id text; + v_job_row apalis.jobs; + BEGIN + SELECT id, job_type + INTO v_job_id, v_job_type + FROM 
apalis.jobs + WHERE status = 'Pending' + AND run_at < now() + AND job_type = v_job_type + ORDER BY run_at ASC limit 1 FOR UPDATE skip LOCKED; + + IF v_job_id IS NULL THEN + RETURN NULL; + END IF; + + UPDATE apalis.jobs + SET + status = 'Running', + lock_by = worker_id, + lock_at = now() + WHERE id = v_job_id + returning * INTO v_job_row; + RETURN v_job_row; + END; + $$ LANGUAGE plpgsql volatile; + + CREATE FUNCTION apalis.notify_new_jobs() returns trigger as $$ + BEGIN + perform pg_notify('apalis::job', 'insert'); + return new; + END; + $$ language plpgsql; + + CREATE TRIGGER notify_workers after insert on apalis.jobs for each statement execute procedure apalis.notify_new_jobs(); + diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20221018142001_init.sql b/matrix-authentication-service/crates/storage-pg/migrations/20221018142001_init.sql new file mode 100644 index 00000000..9c91adec --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20221018142001_init.sql @@ -0,0 +1,351 @@ +-- Copyright 2022 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +----------- +-- Users -- +----------- + +CREATE TABLE "users" ( + "user_id" UUID NOT NULL + CONSTRAINT "users_pkey" + PRIMARY KEY, + + "username" TEXT NOT NULL + CONSTRAINT "users_username_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL +); + +CREATE TABLE "user_passwords" ( + "user_password_id" UUID NOT NULL + CONSTRAINT "user_passwords_pkey" + PRIMARY KEY, + + "user_id" UUID NOT NULL + CONSTRAINT "user_passwords_user_id_fkey" + REFERENCES "users" ("user_id"), + + "hashed_password" TEXT NOT NULL, + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL +); + +CREATE TABLE "user_emails" ( + "user_email_id" UUID NOT NULL + CONSTRAINT "user_emails_pkey" + PRIMARY KEY, + + "user_id" UUID NOT NULL + CONSTRAINT "user_emails_user_id_fkey" + REFERENCES "users" ("user_id") + ON DELETE CASCADE, + + "email" TEXT NOT NULL, + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "confirmed_at" TIMESTAMP WITH TIME ZONE +); + +ALTER TABLE "users" + ADD COLUMN "primary_user_email_id" UUID + CONSTRAINT "users_primary_user_email_id_fkey" + REFERENCES "user_emails" ("user_email_id") + ON DELETE SET NULL; + +CREATE TABLE "user_email_confirmation_codes" ( + "user_email_confirmation_code_id" UUID NOT NULL + CONSTRAINT "user_email_confirmation_codes_pkey" + PRIMARY KEY, + + "user_email_id" UUID NOT NULL + CONSTRAINT "user_email_confirmation_codes_user_email_id_fkey" + REFERENCES "user_emails" ("user_email_id"), + + "code" TEXT NOT NULL + CONSTRAINT "user_email_confirmation_codes_code_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "expires_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "consumed_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "user_sessions" ( + "user_session_id" UUID NOT NULL + CONSTRAINT "user_sessions_pkey" + PRIMARY KEY, + + "user_id" UUID NOT NULL + CONSTRAINT "user_sessions_user_id_fkey" + REFERENCES "users" ("user_id"), + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "finished_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE 
"user_session_authentications" ( + "user_session_authentication_id" UUID NOT NULL + CONSTRAINT "user_session_authentications_pkey" + PRIMARY KEY, + + "user_session_id" UUID NOT NULL + CONSTRAINT "user_session_authentications_user_session_id_fkey" + REFERENCES "user_sessions" ("user_session_id"), + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL +); + +--------------------- +-- Compat sessions -- +--------------------- + +CREATE TABLE "compat_sessions" ( + "compat_session_id" UUID NOT NULL + CONSTRAINT "compat_sessions_pkey" + PRIMARY KEY, + + "user_id" UUID NOT NULL + CONSTRAINT "compat_sessions_user_id_fkey" + REFERENCES "users" ("user_id"), + + "device_id" TEXT NOT NULL + CONSTRAINT "compat_sessions_device_id_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "finished_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "compat_sso_logins" ( + "compat_sso_login_id" UUID NOT NULL + CONSTRAINT "compat_sso_logins_pkey" + PRIMARY KEY, + + "redirect_uri" TEXT NOT NULL, + + "login_token" TEXT NOT NULL + CONSTRAINT "compat_sessions_login_token_unique" + UNIQUE, + + "compat_session_id" UUID + CONSTRAINT "compat_sso_logins_compat_session_id_fkey" + REFERENCES "compat_sessions" ("compat_session_id") + ON DELETE SET NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "fulfilled_at" TIMESTAMP WITH TIME ZONE, + "exchanged_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "compat_access_tokens" ( + "compat_access_token_id" UUID NOT NULL + CONSTRAINT "compat_access_tokens_pkey" + PRIMARY KEY, + + "compat_session_id" UUID NOT NULL + CONSTRAINT "compat_access_tokens_compat_session_id_fkey" + REFERENCES "compat_sessions" ("compat_session_id"), + + "access_token" TEXT NOT NULL + CONSTRAINT "compat_access_tokens_access_token_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "expires_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "compat_refresh_tokens" ( + "compat_refresh_token_id" UUID NOT NULL + CONSTRAINT 
"compat_refresh_tokens_pkey" + PRIMARY KEY, + + "compat_session_id" UUID NOT NULL + CONSTRAINT "compat_refresh_tokens_compat_session_id_fkey" + REFERENCES "compat_sessions" ("compat_session_id"), + + "compat_access_token_id" UUID NOT NULL + CONSTRAINT "compat_refresh_tokens_compat_access_token_id_fkey" + REFERENCES "compat_access_tokens" ("compat_access_token_id"), + + "refresh_token" TEXT NOT NULL + CONSTRAINT "compat_refresh_tokens_refresh_token_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "consumed_at" TIMESTAMP WITH TIME ZONE +); + +---------------- +-- OAuth 2.0 --- +---------------- + +CREATE TABLE "oauth2_clients" ( + "oauth2_client_id" UUID NOT NULL + CONSTRAINT "oauth2_clients_pkey" + PRIMARY KEY, + + "encrypted_client_secret" TEXT, + + "grant_type_authorization_code" BOOLEAN NOT NULL, + "grant_type_refresh_token" BOOLEAN NOT NULL, + + "client_name" TEXT, + "logo_uri" TEXT, + "client_uri" TEXT, + "policy_uri" TEXT, + "tos_uri" TEXT, + + "jwks_uri" TEXT, + "jwks" JSONB, + + "id_token_signed_response_alg" TEXT, + "token_endpoint_auth_method" TEXT, + "token_endpoint_auth_signing_alg" TEXT, + "initiate_login_uri" TEXT, + "userinfo_signed_response_alg" TEXT, + + "created_at" TIMESTAMP WITH TIME ZONE NULL +); + +CREATE TABLE "oauth2_client_redirect_uris" ( + "oauth2_client_redirect_uri_id" UUID NOT NULL + CONSTRAINT "oauth2_client_redirect_uris_pkey" + PRIMARY KEY, + + "oauth2_client_id" UUID NOT NULL + CONSTRAINT "tbl_oauth2_client_id_fkey" + REFERENCES "oauth2_clients" ("oauth2_client_id"), + + "redirect_uri" TEXT NOT NULL +); + +CREATE TABLE "oauth2_sessions" ( + "oauth2_session_id" UUID NOT NULL + CONSTRAINT "oauth2_sessions_pkey" + PRIMARY KEY, + + "user_session_id" UUID NOT NULL + CONSTRAINT "oauth2_sessions_user_session_id_fkey" + REFERENCES "user_sessions" ("user_session_id"), + + "oauth2_client_id" UUID NOT NULL + CONSTRAINT "oauth2_sessions_oauth2_client_id_fkey" + REFERENCES "oauth2_clients" ("oauth2_client_id"), + + "scope" 
TEXT NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "finished_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "oauth2_consents" ( + "oauth2_consent_id" UUID NOT NULL + CONSTRAINT "oauth2_consents_pkey" + PRIMARY KEY, + + "oauth2_client_id" UUID NOT NULL + CONSTRAINT "oauth2_consents_oauth2_client_id_fkey" + REFERENCES "oauth2_clients" ("oauth2_client_id"), + + "user_id" UUID NOT NULL + CONSTRAINT "oauth2_consents_user_id_fkey" + REFERENCES "users" ("user_id"), + + "scope_token" TEXT NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "refreshed_at" TIMESTAMP WITH TIME ZONE, + + CONSTRAINT "oauth2_consents_unique" + UNIQUE ("oauth2_client_id", "user_id", "scope_token") +); + +CREATE INDEX "oauth2_consents_oauth2_client_id_user_id" + ON "oauth2_consents" ("oauth2_client_id", "user_id"); + +CREATE TABLE "oauth2_access_tokens" ( + "oauth2_access_token_id" UUID NOT NULL + CONSTRAINT "oauth2_access_tokens_pkey" + PRIMARY KEY, + + "oauth2_session_id" UUID NOT NULL + CONSTRAINT "oauth2_access_tokens_oauth2_session_id_fkey" + REFERENCES "oauth2_sessions" ("oauth2_session_id"), + + "access_token" TEXT NOT NULL + CONSTRAINT "oauth2_access_tokens_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "expires_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "revoked_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "oauth2_refresh_tokens" ( + "oauth2_refresh_token_id" UUID NOT NULL + CONSTRAINT "oauth2_refresh_tokens_pkey" + PRIMARY KEY, + + "oauth2_session_id" UUID NOT NULL + CONSTRAINT "oauth2_access_tokens_oauth2_session_id_fkey" + REFERENCES "oauth2_sessions" ("oauth2_session_id"), + + "oauth2_access_token_id" UUID + CONSTRAINT "oauth2_refresh_tokens_oauth2_access_token_id_fkey" + REFERENCES "oauth2_access_tokens" ("oauth2_access_token_id") + ON DELETE SET NULL, + + "refresh_token" TEXT NOT NULL + CONSTRAINT "oauth2_refresh_tokens_unique" + UNIQUE, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "consumed_at" TIMESTAMP WITH TIME 
ZONE, + "revoked_at" TIMESTAMP WITH TIME ZONE +); + +CREATE TABLE "oauth2_authorization_grants" ( + "oauth2_authorization_grant_id" UUID NOT NULL + CONSTRAINT "oauth2_authorization_grants_pkey" + PRIMARY KEY, + + "oauth2_client_id" UUID NOT NULL + CONSTRAINT "tbl_oauth2_client_fkey" + REFERENCES "oauth2_clients" ("oauth2_client_id"), + + "oauth2_session_id" UUID + CONSTRAINT "tbl_oauth2_session_fkey" + REFERENCES "oauth2_sessions" ("oauth2_session_id"), + + "authorization_code" TEXT + CONSTRAINT "oauth2_authorization_grants_authorization_code_unique" + UNIQUE, + + "redirect_uri" TEXT NOT NULL, + + "scope" TEXT NOT NULL, + "state" TEXT, + "nonce" TEXT, + "max_age" INTEGER, + "response_mode" TEXT NOT NULL, + "code_challenge_method" TEXT, + "code_challenge" TEXT, + "response_type_code" BOOLEAN NOT NULL, + "response_type_id_token" BOOLEAN NOT NULL, + "requires_consent" BOOLEAN NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "fulfilled_at" TIMESTAMP WITH TIME ZONE, + "cancelled_at" TIMESTAMP WITH TIME ZONE, + "exchanged_at" TIMESTAMP WITH TIME ZONE +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20221121151402_upstream_oauth.sql b/matrix-authentication-service/crates/storage-pg/migrations/20221121151402_upstream_oauth.sql new file mode 100644 index 00000000..86a117dd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20221121151402_upstream_oauth.sql @@ -0,0 +1,91 @@ +-- Copyright 2022 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. + +CREATE TABLE "upstream_oauth_providers" ( + "upstream_oauth_provider_id" UUID NOT NULL + CONSTRAINT "upstream_oauth_providers_pkey" + PRIMARY KEY, + + "issuer" TEXT NOT NULL, + + "scope" TEXT NOT NULL, + + "client_id" TEXT NOT NULL, + + -- Used for client_secret_basic, client_secret_post and client_secret_jwt auth methods + "encrypted_client_secret" TEXT, + + -- Used for client_secret_jwt and private_key_jwt auth methods + "token_endpoint_signing_alg" TEXT, + + "token_endpoint_auth_method" TEXT NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL +); + +CREATE TABLE "upstream_oauth_links" ( + "upstream_oauth_link_id" UUID NOT NULL + CONSTRAINT "upstream_oauth_links_pkey" + PRIMARY KEY, + + "upstream_oauth_provider_id" UUID NOT NULL + CONSTRAINT "upstream_oauth_links_provider_fkey" + REFERENCES "upstream_oauth_providers" ("upstream_oauth_provider_id"), + + -- The user is initially NULL when logging in the first time. + -- It then either links to an existing account, or creates a new one from scratch. 
+ "user_id" UUID + CONSTRAINT "upstream_oauth_link_user_fkey" + REFERENCES "users" ("user_id"), + + "subject" TEXT NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- There should only be one entry per subject/provider tuple + CONSTRAINT "upstream_oauth_links_subject_unique" + UNIQUE ("upstream_oauth_provider_id", "subject") +); + +CREATE TABLE "upstream_oauth_authorization_sessions" ( + "upstream_oauth_authorization_session_id" UUID NOT NULL + CONSTRAINT "upstream_oauth_authorization_sessions_pkey" + PRIMARY KEY, + + "upstream_oauth_provider_id" UUID NOT NULL + CONSTRAINT "upstream_oauth_authorization_sessions_provider_fkey" + REFERENCES "upstream_oauth_providers" ("upstream_oauth_provider_id"), + + -- The link it resolves to at the end of the authorization grant + "upstream_oauth_link_id" UUID + CONSTRAINT "upstream_oauth_authorization_sessions_link_fkey" + REFERENCES "upstream_oauth_links" ("upstream_oauth_link_id"), + + -- The ID token we got at the end of the authorization grant + "id_token" TEXT, + + "state" TEXT NOT NULL + CONSTRAINT "upstream_oauth_authorization_sessions_state_unique" + UNIQUE, + + "code_challenge_verifier" TEXT, + "nonce" TEXT NOT NULL, + + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the session turned into a link + "completed_at" TIMESTAMP WITH TIME ZONE, + -- When the session turned into a user session authentication + "consumed_at" TIMESTAMP WITH TIME ZONE +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20221213145242_password_schemes.sql b/matrix-authentication-service/crates/storage-pg/migrations/20221213145242_password_schemes.sql new file mode 100644 index 00000000..1ccdba51 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20221213145242_password_schemes.sql @@ -0,0 +1,24 @@ +-- Copyright 2022 The Matrix.org Foundation C.I.C. 
+-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +ALTER TABLE "user_passwords" + ADD COLUMN "version" INTEGER NOT NULL DEFAULT 1, + ADD COLUMN "upgraded_from_id" UUID + CONSTRAINT "user_passwords_upgraded_from_id_fkey" + REFERENCES "user_passwords" ("user_password_id") + ON DELETE SET NULL; + +-- Remove the default after creating the column +ALTER TABLE "user_passwords" + ALTER COLUMN "version" DROP DEFAULT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230408234928_add_get_jobs_fn_.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230408234928_add_get_jobs_fn_.sql new file mode 100644 index 00000000..660eca47 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230408234928_add_get_jobs_fn_.sql @@ -0,0 +1,27 @@ +DROP FUNCTION apalis.get_job( + worker_id TEXT, + v_job_type TEXT + ); + +CREATE OR replace FUNCTION apalis.get_jobs( + worker_id TEXT, + v_job_type TEXT, + v_job_count integer DEFAULT 5 :: integer + ) returns setof apalis.jobs AS $$ BEGIN RETURN QUERY +UPDATE apalis.jobs +SET status = 'Running', + lock_by = worker_id, + lock_at = now() +WHERE id IN ( + SELECT id + FROM apalis.jobs + WHERE status = 'Pending' + AND run_at < now() + AND job_type = v_job_type + ORDER BY run_at ASC + limit v_job_count FOR + UPDATE skip LOCKED + ) +returning *; +END; +$$ LANGUAGE plpgsql volatile; \ No newline at end of file diff --git 
a/matrix-authentication-service/crates/storage-pg/migrations/20230616093555_compat_admin_flag.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230616093555_compat_admin_flag.sql new file mode 100644 index 00000000..ce42c6f9 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230616093555_compat_admin_flag.sql @@ -0,0 +1,16 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +ALTER TABLE compat_sessions + ADD COLUMN is_synapse_admin BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230621140528_upstream_oauth_claims_imports.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230621140528_upstream_oauth_claims_imports.sql new file mode 100644 index 00000000..32f58c28 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230621140528_upstream_oauth_claims_imports.sql @@ -0,0 +1,19 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +ALTER TABLE upstream_oauth_providers + ADD COLUMN claims_imports + JSONB + NOT NULL + DEFAULT '{}'; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230626130338_oauth_clients_static.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230626130338_oauth_clients_static.sql new file mode 100644 index 00000000..6df88b33 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230626130338_oauth_clients_static.sql @@ -0,0 +1,19 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This adds a flag to the OAuth 2.0 clients to indicate whether they are static (i.e. defined in config) or not. 
+ALTER TABLE oauth2_clients + ADD COLUMN is_static + BOOLEAN NOT NULL + DEFAULT FALSE; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230728154304_user_lock.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230728154304_user_lock.sql new file mode 100644 index 00000000..a015c752 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230728154304_user_lock.sql @@ -0,0 +1,19 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Add a new column in on the `users` to record when an account gets locked +ALTER TABLE "users" + ADD COLUMN "locked_at" + TIMESTAMP WITH TIME ZONE + DEFAULT NULL; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230823125247_drop_apalis_push_job.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230823125247_drop_apalis_push_job.sql new file mode 100644 index 00000000..1ef75bd8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230823125247_drop_apalis_push_job.sql @@ -0,0 +1,53 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +/** + * This fully drops any existing push_job functions, as we're not relying on them anymore + */ + +-- Temporarily change the client_min_messages to suppress the NOTICEs +SET client_min_messages = 'ERROR'; + +DROP FUNCTION IF EXISTS apalis.push_job( + job_type text, + job json, + job_id text, + status text, + run_at timestamptz, + max_attempts integer +); + +DROP FUNCTION IF EXISTS apalis.push_job( + job_type text, + job json, + status text, + run_at timestamptz, + max_attempts integer +); + +-- Reset the client_min_messages +RESET client_min_messages; + +/** + * Remove the old applied migrations in case they were applied: + * - 20220709210445_add_job_fn.sql + * - 20230330210841_replace_add_job_fn.sql + * - 20230408110421_drop_old_push_job.sql + */ +DELETE FROM public._sqlx_migrations +WHERE version IN ( + 20220709210445, + 20230330210841, + 20230408110421 +); \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230828085439_oauth2_clients_more_fields.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230828085439_oauth2_clients_more_fields.sql new file mode 100644 index 00000000..8cc7855f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230828085439_oauth2_clients_more_fields.sql @@ -0,0 +1,32 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Adds a few fields to OAuth 2.0 clients, and squash the redirect_uris in the same table + +ALTER TABLE "oauth2_clients" + ADD COLUMN "redirect_uris" TEXT[] NOT NULL DEFAULT '{}', + ADD COLUMN "application_type" TEXT, + ADD COLUMN "contacts" TEXT[] NOT NULL DEFAULT '{}'; + +-- Insert in the new `redirect_uris` column the values from the old table +UPDATE "oauth2_clients" + SET "redirect_uris" = ARRAY( + SELECT "redirect_uri" + FROM "oauth2_client_redirect_uris" + WHERE "oauth2_client_redirect_uris"."oauth2_client_id" = "oauth2_clients"."oauth2_client_id" + GROUP BY "redirect_uri" + ); + +-- Drop the old table +DROP TABLE "oauth2_client_redirect_uris"; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230828143553_user_session_authentication_source.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230828143553_user_session_authentication_source.sql new file mode 100644 index 00000000..77a45348 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230828143553_user_session_authentication_source.sql @@ -0,0 +1,23 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This adds the source of each authentication of a user_session +ALTER TABLE "user_session_authentications" + ADD COLUMN "user_password_id" UUID + REFERENCES "user_passwords" ("user_password_id") + ON DELETE SET NULL, + + ADD COLUMN "upstream_oauth_authorization_session_id" UUID + REFERENCES "upstream_oauth_authorization_sessions" ("upstream_oauth_authorization_session_id") + ON DELETE SET NULL; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230829092920_oauth2_sessions_user_id_scope_list.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230829092920_oauth2_sessions_user_id_scope_list.sql new file mode 100644 index 00000000..acfeda2c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230829092920_oauth2_sessions_user_id_scope_list.sql @@ -0,0 +1,49 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +-- We need to be able to do OAuth 2.0 sessions without a user session, +-- and we would like to find sessions with a particular scope. +-- +-- This migration edits the "oauth2_sessions" table to: +-- * Add a "user_id" column +-- * Make the "user_session_id" nullable +-- * Infer the "user_id" from the "user_session_id" for existing rows +-- * Add a "scope_list" column, which is the "scope" column in array form +-- * Import the "scope" column into the "scope_list" column for existing rows by splitting on spaces +-- * Sets both columns as NOT NULL once the migration is complete +-- * Drop the "scope" column +-- * Index the "scope_list" column with a GIN index + +ALTER TABLE "oauth2_sessions" + ADD COLUMN "user_id" UUID + REFERENCES "users" ("user_id") ON DELETE CASCADE, + ADD COLUMN "scope_list" TEXT[]; + +UPDATE "oauth2_sessions" + SET "user_id" = "user_sessions"."user_id" + FROM "user_sessions" + WHERE "oauth2_sessions"."user_session_id" = "user_sessions"."user_session_id"; + +UPDATE "oauth2_sessions" + SET "scope_list" = string_to_array("scope", ' ') + WHERE "scope_list" IS NULL; + +ALTER TABLE "oauth2_sessions" + ALTER COLUMN "user_session_id" DROP NOT NULL, + ALTER COLUMN "user_id" SET NOT NULL, + ALTER COLUMN "scope_list" SET NOT NULL, + DROP COLUMN "scope"; + +CREATE INDEX "oauth2_sessions_scope_list_idx" + ON "oauth2_sessions" USING GIN ("scope_list"); \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230829141928_user_session_user_agent.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230829141928_user_session_user_agent.sql new file mode 100644 index 00000000..04562606 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230829141928_user_session_user_agent.sql @@ -0,0 +1,16 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. 
+-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This adds a user_agent column to the user_sessions table +ALTER TABLE user_sessions ADD COLUMN user_agent TEXT; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230904135550_oauth2_client_credentials_grant.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230904135550_oauth2_client_credentials_grant.sql new file mode 100644 index 00000000..1fffbc33 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230904135550_oauth2_client_credentials_grant.sql @@ -0,0 +1,21 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +-- This makes the user_id in the oauth2_sessions nullable, which allows us to create user-less sessions +ALTER TABLE oauth2_sessions + ALTER COLUMN user_id DROP NOT NULL; + +-- This adds a column to the oauth2_clients to allow them to use the client_credentials flow +ALTER TABLE oauth2_clients + ADD COLUMN grant_type_client_credentials boolean NOT NULL DEFAULT false; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230911091636_oauth2_token_expiration.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230911091636_oauth2_token_expiration.sql new file mode 100644 index 00000000..beebb617 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230911091636_oauth2_token_expiration.sql @@ -0,0 +1,19 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This makes the `expires_at` column nullable on the `oauth2_access_tokens`. +-- This is to allow permanent tokens to be created via the admin API. 
+ALTER TABLE oauth2_access_tokens + ALTER COLUMN expires_at DROP NOT NULL; + diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20230919155444_record_session_last_activity.sql b/matrix-authentication-service/crates/storage-pg/migrations/20230919155444_record_session_last_activity.sql new file mode 100644 index 00000000..0336c9df --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20230919155444_record_session_last_activity.sql @@ -0,0 +1,39 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This adds a `last_active_at` timestamp and a `last_active_ip` column +-- to the `oauth2_sessions`, `user_sessions` and `compat_sessions` tables. +-- The timestamp is indexed with the `user_id`, as they are likely to be queried together. 
+ALTER TABLE "oauth2_sessions" + ADD COLUMN "last_active_at" TIMESTAMP WITH TIME ZONE, + ADD COLUMN "last_active_ip" INET; + +CREATE INDEX "oauth2_sessions_user_id_last_active_at" + ON "oauth2_sessions" ("user_id", "last_active_at"); + + +ALTER TABLE "user_sessions" + ADD COLUMN "last_active_at" TIMESTAMP WITH TIME ZONE, + ADD COLUMN "last_active_ip" INET; + +CREATE INDEX "user_sessions_user_id_last_active_at" + ON "user_sessions" ("user_id", "last_active_at"); + + +ALTER TABLE "compat_sessions" + ADD COLUMN "last_active_at" TIMESTAMP WITH TIME ZONE, + ADD COLUMN "last_active_ip" INET; + +CREATE INDEX "compat_sessions_user_id_last_active_at" + ON "compat_sessions" ("user_id", "last_active_at"); \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20231009142904_user_can_request_admin.sql b/matrix-authentication-service/crates/storage-pg/migrations/20231009142904_user_can_request_admin.sql new file mode 100644 index 00000000..a545f477 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20231009142904_user_can_request_admin.sql @@ -0,0 +1,17 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +-- Adds a `can_request_admin` column to the `users` table +ALTER TABLE users + ADD COLUMN can_request_admin BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20231116104353_upstream_oauth_overrides.sql b/matrix-authentication-service/crates/storage-pg/migrations/20231116104353_upstream_oauth_overrides.sql new file mode 100644 index 00000000..a4e71b2c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20231116104353_upstream_oauth_overrides.sql @@ -0,0 +1,21 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Adds various endpoint overrides for oauth providers +ALTER TABLE upstream_oauth_providers + ADD COLUMN "jwks_uri_override" TEXT, + ADD COLUMN "authorization_endpoint_override" TEXT, + ADD COLUMN "token_endpoint_override" TEXT, + ADD COLUMN "discovery_mode" TEXT NOT NULL DEFAULT 'oidc', + ADD COLUMN "pkce_mode" TEXT NOT NULL DEFAULT 'auto'; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20231120110559_upstream_oauth_branding.sql b/matrix-authentication-service/crates/storage-pg/migrations/20231120110559_upstream_oauth_branding.sql new file mode 100644 index 00000000..1f54c86f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20231120110559_upstream_oauth_branding.sql @@ -0,0 +1,18 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. 
+-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Adds human readable branding information to the upstream_oauth_providers table +ALTER TABLE upstream_oauth_providers + ADD COLUMN human_name text, + ADD COLUMN brand_name text; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20231207090532_oauth_device_code_grant.sql b/matrix-authentication-service/crates/storage-pg/migrations/20231207090532_oauth_device_code_grant.sql new file mode 100644 index 00000000..b90dd150 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20231207090532_oauth_device_code_grant.sql @@ -0,0 +1,82 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +--- Adds a table to store device codes for OAuth 2.0 device code flows +-- +-- +-- This has 4 possible states, only going in one direction: +-- +-- [[ Pending ]] +-- | | +-- | [ Rejected ] -- The `rejected_at` and `user_session_id` fields are set +-- | +-- [ Fulfilled ] -- The `fulfilled_at` and `user_session_id` fields are set +-- | +-- [ Exchanged ] -- The `exchanged_at` and `oauth2_session_id` fields are also set +-- +CREATE TABLE "oauth2_device_code_grant" ( + "oauth2_device_code_grant_id" UUID NOT NULL + PRIMARY KEY, + + -- The client who initiated the device code grant + "oauth2_client_id" UUID NOT NULL + REFERENCES "oauth2_clients" ("oauth2_client_id") + ON DELETE CASCADE, + + -- The scope requested + "scope" TEXT NOT NULL, + + -- The random code that is displayed to the user + "user_code" TEXT NOT NULL + UNIQUE, + + -- The random code that the client uses to poll for the access token + "device_code" TEXT NOT NULL + UNIQUE, + + -- Timestamp when the device code was created + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- Timestamp when the device code expires + "expires_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the device code was fulfilled, i.e. the user has granted access + -- This is mutually exclusive with rejected_at + "fulfilled_at" TIMESTAMP WITH TIME ZONE, + + -- When the device code was rejected, i.e. 
the user has denied access + -- This is mutually exclusive with fulfilled_at + "rejected_at" TIMESTAMP WITH TIME ZONE, + + -- When the device code was exchanged + -- This means "fulfilled_at" has also been set + "exchanged_at" TIMESTAMP WITH TIME ZONE, + + -- The OAuth 2.0 session generated for this device code + -- This means "exchanged_at" has also been set + "oauth2_session_id" UUID + REFERENCES "oauth2_sessions" ("oauth2_session_id") + ON DELETE CASCADE, + + -- The browser session ID that the user used to authenticate + -- This means "fulfilled_at" or "rejected_at" has also been set + "user_session_id" UUID + REFERENCES "user_sessions" ("user_session_id"), + + -- The IP address of the user when they authenticated + "ip_address" INET, + + -- The user agent of the user when they authenticated + "user_agent" TEXT +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20231208155602_oauth_clients_device_code_grant.sql b/matrix-authentication-service/crates/storage-pg/migrations/20231208155602_oauth_clients_device_code_grant.sql new file mode 100644 index 00000000..569db26e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20231208155602_oauth_clients_device_code_grant.sql @@ -0,0 +1,18 @@ +-- Copyright 2023 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +-- Add a flag on oauth_clients to indicate whether they support the device code grant +ALTER TABLE oauth2_clients + ADD COLUMN grant_type_device_code BOOLEAN + NOT NULL DEFAULT FALSE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240207100003_user_terms.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240207100003_user_terms.sql new file mode 100644 index 00000000..d94888a8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240207100003_user_terms.sql @@ -0,0 +1,32 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Track when users have accepted the terms of service, and which version they accepted. +CREATE TABLE user_terms ( + "user_terms_id" UUID NOT NULL + PRIMARY KEY, + + -- The user who accepted the terms of service. + "user_id" UUID NOT NULL + REFERENCES users (user_id) ON DELETE CASCADE, + + -- The URL of the terms of service that the user accepted. + "terms_url" TEXT NOT NULL, + + -- When the user accepted the terms of service. + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- Unique constraint to ensure that a user can only accept a given version of the terms once. 
+ UNIQUE ("user_id", "terms_url") +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240220141353_nonunique_compat_device_id.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240220141353_nonunique_compat_device_id.sql new file mode 100644 index 00000000..3c65c673 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240220141353_nonunique_compat_device_id.sql @@ -0,0 +1,17 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Drops the unique constraint on the device_id column in the compat_sessions table +ALTER TABLE compat_sessions + DROP CONSTRAINT compat_sessions_device_id_unique; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240220150201_compat_sessions_user_sessions_link.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240220150201_compat_sessions_user_sessions_link.sql new file mode 100644 index 00000000..8a33f91d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240220150201_compat_sessions_user_sessions_link.sql @@ -0,0 +1,19 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Adds an optional link between the compatibility sessions and the user sessions +ALTER TABLE compat_sessions + ADD COLUMN user_session_id UUID + REFERENCES user_sessions (user_session_id) + ON DELETE SET NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240221164945_sessions_user_agent.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240221164945_sessions_user_agent.sql new file mode 100644 index 00000000..5b1ce269 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240221164945_sessions_user_agent.sql @@ -0,0 +1,17 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ +-- Adds user agent columns to oauth and compat sessions tables +ALTER TABLE oauth2_sessions ADD COLUMN user_agent TEXT; +ALTER TABLE compat_sessions ADD COLUMN user_agent TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240301091201_upstream_oauth_additional_parameters.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240301091201_upstream_oauth_additional_parameters.sql new file mode 100644 index 00000000..e941636f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240301091201_upstream_oauth_additional_parameters.sql @@ -0,0 +1,18 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Adds a column to the upstream_oauth_providers table to store additional parameters to be sent to the OAuth provider. +-- Parameters are stored as [["key", "value"], ["key", "value"], ...] in a JSONB column to keep key ordering. 
+ALTER TABLE upstream_oauth_providers + ADD COLUMN additional_parameters JSONB; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240402084854_upstream_oauth_disabled_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240402084854_upstream_oauth_disabled_at.sql new file mode 100644 index 00000000..d1469198 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240402084854_upstream_oauth_disabled_at.sql @@ -0,0 +1,18 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + + +-- Adds a `disabled_at` column to the `upstream_oauth_providers` table, to soft-delete providers. +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "disabled_at" TIMESTAMP WITH TIME ZONE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240621080509_user_recovery.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240621080509_user_recovery.sql new file mode 100644 index 00000000..b75c49b0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240621080509_user_recovery.sql @@ -0,0 +1,64 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. 
+-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- Stores user recovery sessions for when the user lost their credentials. +CREATE TABLE "user_recovery_sessions" ( + "user_recovery_session_id" UUID NOT NULL + CONSTRAINT "user_recovery_sessions_pkey" + PRIMARY KEY, + + -- The email address for which the recovery session was requested + "email" TEXT NOT NULL, + + -- The user agent of the client that requested the recovery session + "user_agent" TEXT NOT NULL, + + -- The IP address of the client that requested the recovery session + "ip_address" INET, + + -- The language of the client that requested the recovery session + "locale" TEXT NOT NULL, + + -- When the recovery session was created + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the recovery session was consumed + "consumed_at" TIMESTAMP WITH TIME ZONE +); + +-- Stores the recovery tickets for a user recovery session. 
+CREATE TABLE "user_recovery_tickets" ( + "user_recovery_ticket_id" UUID NOT NULL + CONSTRAINT "user_recovery_tickets_pkey" + PRIMARY KEY, + + -- The recovery session this ticket belongs to + "user_recovery_session_id" UUID NOT NULL + REFERENCES "user_recovery_sessions" ("user_recovery_session_id") + ON DELETE CASCADE, + + -- The user_email for which the recovery ticket was generated + "user_email_id" UUID NOT NULL + REFERENCES "user_emails" ("user_email_id") + ON DELETE CASCADE, + + -- The recovery ticket + "ticket" TEXT NOT NULL, + + -- When the recovery ticket was created + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the recovery ticket expires + "expires_at" TIMESTAMP WITH TIME ZONE NOT NULL +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20240718075125_sessions_active_index.sql b/matrix-authentication-service/crates/storage-pg/migrations/20240718075125_sessions_active_index.sql new file mode 100644 index 00000000..212a7a05 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20240718075125_sessions_active_index.sql @@ -0,0 +1,19 @@ +-- Copyright 2024 The Matrix.org Foundation C.I.C. +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+ + +-- Adds an index on the last_active_at column of the sessions tables +CREATE INDEX "compat_sessions_last_active_at_idx" ON "compat_sessions" ("last_active_at"); +CREATE INDEX "oauth2_sessions_last_active_at_idx" ON "oauth2_sessions" ("last_active_at"); +CREATE INDEX "user_sessions_last_active_at_idx" ON "user_sessions" ("last_active_at"); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241004075132_queue_worker.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241004075132_queue_worker.sql new file mode 100644 index 00000000..07b49d22 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241004075132_queue_worker.sql @@ -0,0 +1,37 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- This table stores information about workers, mostly to track their health +CREATE TABLE queue_workers ( + queue_worker_id UUID NOT NULL PRIMARY KEY, + + -- When the worker was registered + registered_at TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the worker was last seen + last_seen_at TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the worker was shut down + shutdown_at TIMESTAMP WITH TIME ZONE +); + +-- This single-row table stores the leader of the queue +-- The leader is responsible for running maintenance tasks +CREATE UNLOGGED TABLE queue_leader ( + -- This makes the row unique + active BOOLEAN NOT NULL DEFAULT TRUE UNIQUE, + + -- When the leader was elected + elected_at TIMESTAMP WITH TIME ZONE NOT NULL, + + -- Until when the lease is valid + expires_at TIMESTAMP WITH TIME ZONE NOT NULL, + + -- The worker ID of the leader + queue_worker_id UUID NOT NULL REFERENCES queue_workers (queue_worker_id), + + -- This, combined with the unique constraint, makes sure we only ever have a single row + CONSTRAINT queue_leader_active CHECK (active IS TRUE) +); diff --git 
a/matrix-authentication-service/crates/storage-pg/migrations/20241004121132_queue_job.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241004121132_queue_job.sql new file mode 100644 index 00000000..859377d5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241004121132_queue_job.sql @@ -0,0 +1,79 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE TYPE queue_job_status AS ENUM ( + -- The job is available to be picked up by a worker + 'available', + + -- The job is currently being processed by a worker + 'running', + + -- The job has been completed + 'completed', + + -- The worker running the job was lost + 'lost' +); + +CREATE TABLE queue_jobs ( + queue_job_id UUID NOT NULL PRIMARY KEY, + + -- The status of the job + status queue_job_status NOT NULL DEFAULT 'available', + + -- When the job was created + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the job was grabbed by a worker + started_at TIMESTAMP WITH TIME ZONE, + + -- Which worker is currently processing the job + started_by UUID REFERENCES queue_workers (queue_worker_id), + + -- When the job was completed + completed_at TIMESTAMP WITH TIME ZONE, + + -- The name of the queue this job belongs to + queue_name TEXT NOT NULL, + + -- The arguments to the job + payload JSONB NOT NULL DEFAULT '{}', + + -- Arbitrary metadata about the job, like the trace context + metadata JSONB NOT NULL DEFAULT '{}' +); + +-- When we grab jobs, we filter on the status of the job and the queue name +-- Then we order on the `queue_job_id` column, as it is a ULID, which ensures timestamp ordering +CREATE INDEX idx_queue_jobs_status_queue_job_id + ON queue_jobs + USING BTREE (status, queue_name, queue_job_id); + +-- We would like to notify workers when a job is available to wake them up +CREATE OR REPLACE FUNCTION queue_job_notify() + RETURNS TRIGGER + AS $$ +DECLARE + 
payload json; +BEGIN + IF NEW.status = 'available' THEN + -- The idea with this trigger is to notify the queue worker that a new job + -- is available on a queue. If there are many notifications with the same + -- payload, PG will coalesce them in a single notification, which is why we + -- keep the payload simple. + payload = json_build_object('queue', NEW.queue_name); + PERFORM + pg_notify('queue_available', payload::text); + END IF; + RETURN NULL; +END; +$$ +LANGUAGE plpgsql; + +CREATE TRIGGER queue_job_notify_trigger + AFTER INSERT OR UPDATE OF status + ON queue_jobs + FOR EACH ROW + EXECUTE PROCEDURE queue_job_notify(); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241007160050_oidc_login_hint.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241007160050_oidc_login_hint.sql new file mode 100644 index 00000000..b18932ec --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241007160050_oidc_login_hint.sql @@ -0,0 +1,3 @@ +-- Add login_hint to oauth2_authorization_grants +ALTER TABLE "oauth2_authorization_grants" + ADD COLUMN "login_hint" TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241115163340_upstream_oauth2_response_mode.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241115163340_upstream_oauth2_response_mode.sql new file mode 100644 index 00000000..8d65bd7f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241115163340_upstream_oauth2_response_mode.sql @@ -0,0 +1,8 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Add the response_mode column to the upstream_oauth_providers table +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "response_mode" text NOT NULL DEFAULT 'query'; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241118115314_upstream_oauth2_extra_query_params.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241118115314_upstream_oauth2_extra_query_params.sql new file mode 100644 index 00000000..0e900e0a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241118115314_upstream_oauth2_extra_query_params.sql @@ -0,0 +1,9 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a column to the upstream_oauth_authorization_sessions table to store +-- extra query parameters +ALTER TABLE "upstream_oauth_authorization_sessions" + ADD COLUMN "extra_callback_parameters" JSONB; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241120163320_queue_job_failures.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241120163320_queue_job_failures.sql new file mode 100644 index 00000000..0407d634 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241120163320_queue_job_failures.sql @@ -0,0 +1,17 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Add a new status for failed jobs +ALTER TYPE "queue_job_status" ADD VALUE 'failed'; + +ALTER TABLE "queue_jobs" + -- When the job failed + ADD COLUMN "failed_at" TIMESTAMP WITH TIME ZONE, + -- Error message of the failure + ADD COLUMN "failed_reason" TEXT, + -- How many times we've already tried to run the job + ADD COLUMN "attempt" INTEGER NOT NULL DEFAULT 0, + -- The next attempt, if it was retried + ADD COLUMN "next_attempt_id" UUID REFERENCES "queue_jobs" ("queue_job_id"); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241122130349_queue_job_scheduled.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241122130349_queue_job_scheduled.sql new file mode 100644 index 00000000..e7aff6a0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241122130349_queue_job_scheduled.sql @@ -0,0 +1,11 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a new status for scheduled jobs +ALTER TYPE "queue_job_status" ADD VALUE 'scheduled'; + +ALTER TABLE "queue_jobs" + -- When the job is scheduled to run + ADD COLUMN "scheduled_at" TIMESTAMP WITH TIME ZONE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241122133435_queue_job_scheduled_index.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241122133435_queue_job_scheduled_index.sql new file mode 100644 index 00000000..f8a7422e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241122133435_queue_job_scheduled_index.sql @@ -0,0 +1,9 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Add a partial index on scheduled jobs +CREATE INDEX "queue_jobs_scheduled_at_idx" + ON "queue_jobs" ("scheduled_at") + WHERE "status" = 'scheduled'; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241124145741_upstream_oauth_userinfo.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241124145741_upstream_oauth_userinfo.sql new file mode 100644 index 00000000..7c0168d5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241124145741_upstream_oauth_userinfo.sql @@ -0,0 +1,13 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add columns to upstream_oauth_providers and upstream_oauth_authorization_sessions +-- table to handle userinfo endpoint. +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "fetch_userinfo" BOOLEAN NOT NULL DEFAULT FALSE, + ADD COLUMN "userinfo_endpoint_override" TEXT; + +ALTER TABLE "upstream_oauth_authorization_sessions" + ADD COLUMN "userinfo" JSONB; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241125110803_queue_job_recurrent.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241125110803_queue_job_recurrent.sql new file mode 100644 index 00000000..814e073c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241125110803_queue_job_recurrent.sql @@ -0,0 +1,25 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a table to track the state of scheduled recurring jobs. +CREATE TABLE queue_schedules ( + -- A unique name for the schedule + schedule_name TEXT NOT NULL PRIMARY KEY, + + -- The last time the job was scheduled. If NULL, it means that the job was + -- never scheduled. + last_scheduled_at TIMESTAMP WITH TIME ZONE, + + -- The job that was scheduled last time. 
If NULL, it means that either the + -- job was never scheduled, or the job was cleaned up from the database + last_scheduled_job_id UUID + REFERENCES queue_jobs (queue_job_id) +); + +-- When a job is scheduled from a recurring schedule, we keep a column +-- referencing the name of the schedule +ALTER TABLE queue_jobs + ADD COLUMN schedule_name TEXT + REFERENCES queue_schedules (schedule_name); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241129091057_upstream_oauth2_link_account_name.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241129091057_upstream_oauth2_link_account_name.sql new file mode 100644 index 00000000..3dad420e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241129091057_upstream_oauth2_link_account_name.sql @@ -0,0 +1,9 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add the human_account_name column to the upstream_oauth_links table to store +-- a human-readable name for the upstream account +ALTER TABLE "upstream_oauth_links" + ADD COLUMN "human_account_name" TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241202123523_upstream_oauth_responses_alg.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241202123523_upstream_oauth_responses_alg.sql new file mode 100644 index 00000000..baa7e92c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241202123523_upstream_oauth_responses_alg.sql @@ -0,0 +1,10 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add columns to upstream_oauth_providers to specify the +-- expected signing algorithm for the endpoint JWT responses. 
+ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "id_token_signed_response_alg" TEXT NOT NULL DEFAULT 'RS256', + ADD COLUMN "userinfo_signed_response_alg" TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241210115428_oauth_refresh_token_track_next.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241210115428_oauth_refresh_token_track_next.sql new file mode 100644 index 00000000..b1bd2c3f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241210115428_oauth_refresh_token_track_next.sql @@ -0,0 +1,9 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a reference to the 'next' refresh token when it was consumed and replaced +ALTER TABLE oauth2_refresh_tokens + ADD COLUMN "next_oauth2_refresh_token_id" UUID + REFERENCES oauth2_refresh_tokens (oauth2_refresh_token_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241210133651_oauth2_access_token_first_used.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241210133651_oauth2_access_token_first_used.sql new file mode 100644 index 00000000..8d9507a6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241210133651_oauth2_access_token_first_used.sql @@ -0,0 +1,8 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Track when the access token was first used. A NULL value means it was never used. 
+ALTER TABLE oauth2_access_tokens + ADD COLUMN "first_used_at" TIMESTAMP WITH TIME ZONE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241212154426_oauth2_response_mode_null.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241212154426_oauth2_response_mode_null.sql new file mode 100644 index 00000000..c6a6b7b4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241212154426_oauth2_response_mode_null.sql @@ -0,0 +1,7 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Drop not null requirement on response mode, so we can ignore this query parameter. +ALTER TABLE "upstream_oauth_providers" ALTER COLUMN "response_mode" DROP NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20241213180524_upstream_oauth_optional_issuer.sql b/matrix-authentication-service/crates/storage-pg/migrations/20241213180524_upstream_oauth_optional_issuer.sql new file mode 100644 index 00000000..75676bc3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20241213180524_upstream_oauth_optional_issuer.sql @@ -0,0 +1,8 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Make the issuer field in the upstream_oauth_providers table optional +ALTER TABLE "upstream_oauth_providers" + ALTER COLUMN "issuer" DROP NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250109105709_user_email_authentication_codes.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250109105709_user_email_authentication_codes.sql new file mode 100644 index 00000000..fb1b02e4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250109105709_user_email_authentication_codes.sql @@ -0,0 +1,29 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a table for storing email authentication sessions +CREATE TABLE "user_email_authentications" ( + "user_email_authentication_id" UUID PRIMARY KEY, + "user_session_id" UUID + REFERENCES "user_sessions" ("user_session_id") + ON DELETE SET NULL, + "email" TEXT NOT NULL, + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "completed_at" TIMESTAMP WITH TIME ZONE +); + +-- A single authentication session has multiple codes, in case the user ask for re-sending +CREATE TABLE "user_email_authentication_codes" ( + "user_email_authentication_code_id" UUID PRIMARY KEY, + "user_email_authentication_id" UUID + NOT NULL + REFERENCES "user_email_authentications" ("user_email_authentication_id") + ON DELETE CASCADE, + "code" TEXT NOT NULL, + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + "expires_at" TIMESTAMP WITH TIME ZONE NOT NULL, + CONSTRAINT "user_email_authentication_codes_auth_id_code_unique" + UNIQUE ("user_email_authentication_id", "code") +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250113102144_user_registrations.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250113102144_user_registrations.sql new file mode 100644 index 00000000..6b4590f4 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/migrations/20250113102144_user_registrations.sql @@ -0,0 +1,49 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a table for storing user registrations +CREATE TABLE "user_registrations" ( + "user_registration_id" UUID PRIMARY KEY, + + -- The IP address of the user agent, if any + "ip_address" INET, + + -- The user agent string of the user agent, if any + "user_agent" TEXT, + + -- The post auth action to execute after the registration, if any + "post_auth_action" JSONB, + + -- The username the user asked for + "username" TEXT NOT NULL, + + -- The display name the user asked for + "display_name" TEXT, + + -- The URL to the terms of service at the time of registration + "terms_url" TEXT, + + -- The ID of the email authentication session + "email_authentication_id" UUID + REFERENCES "user_email_authentications" ("user_email_authentication_id") + ON DELETE SET NULL, + + -- The hashed password of the user + "hashed_password" TEXT, + -- The scheme version used to hash the password + "hashed_password_version" INTEGER, + + -- When the object was created + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the registration was completed + "completed_at" TIMESTAMP WITH TIME ZONE +); + +-- Allow using user email authentications for user registrations +ALTER TABLE "user_email_authentications" + ADD COLUMN "user_registration_id" UUID + REFERENCES "user_registrations" ("user_registration_id") + ON DELETE CASCADE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250114135939_allow_deviceless_compat_sessions.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250114135939_allow_deviceless_compat_sessions.sql new file mode 100644 index 00000000..8bf40f72 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/migrations/20250114135939_allow_deviceless_compat_sessions.sql @@ -0,0 +1,7 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Drop the `NOT NULL` requirement on compat sessions, so we can import device-less access tokens from Synapse. +ALTER TABLE compat_sessions ALTER COLUMN device_id DROP NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250115155255_cleanup_unverified_emails.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250115155255_cleanup_unverified_emails.sql new file mode 100644 index 00000000..61a4101f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250115155255_cleanup_unverified_emails.sql @@ -0,0 +1,14 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- This drops all the unverified email addresses from the database, as they are +-- now always verified when they land in the user_emails table. +-- We don't drop the `confirmed_at` column to allow rolling back + +-- First, truncate all the confirmation codes +TRUNCATE TABLE user_email_confirmation_codes; + +-- Then, delete all the unverified email addresses +DELETE FROM user_emails WHERE confirmed_at IS NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250124151529_unsupported_threepids_table.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250124151529_unsupported_threepids_table.sql new file mode 100644 index 00000000..f00cb324 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250124151529_unsupported_threepids_table.sql @@ -0,0 +1,30 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ + + +-- Tracks third-party ID associations that have been verified but are +-- not currently supported by MAS. +-- This is currently used when importing third-party IDs from Synapse, +-- which historically could verify at least phone numbers. +-- E-mail associations will not be stored in this table because those are natively +-- supported by MAS; see the `user_emails` table. + +CREATE TABLE user_unsupported_third_party_ids( + -- The owner of the third-party ID association + user_id UUID NOT NULL + REFERENCES users(user_id) ON DELETE CASCADE, + + -- What type of association is this? + medium TEXT NOT NULL, + + -- The address of the associated ID, e.g. a phone number or other identifier. + address TEXT NOT NULL, + + -- When the association was created + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + + PRIMARY KEY (user_id, medium, address) +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250129154003_compat_sessions_device_name.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250129154003_compat_sessions_device_name.sql new file mode 100644 index 00000000..28294d5d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250129154003_compat_sessions_device_name.sql @@ -0,0 +1,9 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +ALTER TABLE compat_sessions + -- Stores a human-readable name for the device. + -- syn2mas behaviour: Will be populated from the device name in Synapse. 
+ ADD COLUMN human_name TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250130170011_user_is_guest.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250130170011_user_is_guest.sql new file mode 100644 index 00000000..1ca8ce57 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250130170011_user_is_guest.sql @@ -0,0 +1,10 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +ALTER TABLE users + -- Track whether users are guests. + -- Although guest support is not present in MAS yet, syn2mas should import + -- these users and therefore we should track their state. + ADD COLUMN is_guest BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250225091000_dynamic_policy_data.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250225091000_dynamic_policy_data.sql new file mode 100644 index 00000000..38f87d10 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250225091000_dynamic_policy_data.sql @@ -0,0 +1,15 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a table which stores the latest policy data +-- +-- Every time the policy data is updated, it creates a new row, so that we keep +-- a history of the policy data, and trace back which version of the data was used +-- on each evaluation. 
+CREATE TABLE IF NOT EXISTS policy_data ( + policy_data_id UUID PRIMARY KEY, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + data JSONB NOT NULL +); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250311093145_user_deactivated_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250311093145_user_deactivated_at.sql new file mode 100644 index 00000000..e73e3c2a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250311093145_user_deactivated_at.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +ALTER TABLE users + -- Track when a user was deactivated. + ADD COLUMN deactivated_at TIMESTAMP WITH TIME ZONE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250312094013_upstream_oauth2_providers_order.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250312094013_upstream_oauth2_providers_order.sql new file mode 100644 index 00000000..4cf422d7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250312094013_upstream_oauth2_providers_order.sql @@ -0,0 +1,9 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Adds a column to track the 'UI order' of the upstream OAuth2 providers, so +-- that they can be consistently displayed in the UI +ALTER TABLE upstream_oauth_providers + ADD COLUMN ui_order INTEGER NOT NULL DEFAULT 0; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250317151803_upstream_oauth_session_unlinked_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250317151803_upstream_oauth_session_unlinked_at.sql new file mode 100644 index 00000000..a3ad0dc8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250317151803_upstream_oauth_session_unlinked_at.sql @@ -0,0 +1,7 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +ALTER TABLE upstream_oauth_authorization_sessions + ADD COLUMN unlinked_at TIMESTAMP WITH TIME ZONE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250325102310_oauth2_clients_hash.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250325102310_oauth2_clients_hash.sql new file mode 100644 index 00000000..3a4dc841 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250325102310_oauth2_clients_hash.sql @@ -0,0 +1,13 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Adds a column which stores a hash of the client metadata, so that we can +-- deduplicate client registrations +-- +-- This hash is a SHA-256 hash of the JSON-encoded client metadata. Note that we +-- don't retroactively hash existing clients, so this will only be populated for +-- new clients. 
+ALTER TABLE oauth2_clients + ADD COLUMN metadata_digest TEXT UNIQUE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250404105103_compat_sso_login_browser_session.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250404105103_compat_sso_login_browser_session.sql new file mode 100644 index 00000000..4b63590d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250404105103_compat_sso_login_browser_session.sql @@ -0,0 +1,23 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + + +-- Compat SSO Logins in the 'fulfilled' state will now be attached to +-- browser sessions, not compat sessions. +-- Only those in the 'exchanged' state will now have a compat session. +-- +-- Rationale: We can't create the compat session without the client +-- being given an opportunity to specify the device_id, which does not happen +-- until the exchange phase. + +-- Empty the table because we don't want to need to think about backwards +-- compatibility for fulfilled logins that don't have an attached +-- browser session ID. +TRUNCATE compat_sso_logins; + +ALTER TABLE compat_sso_logins + -- browser sessions and user sessions are the same thing + ADD COLUMN user_session_id UUID + REFERENCES user_sessions(user_session_id) ON DELETE CASCADE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000000_idx_compat_access_tokens_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000000_idx_compat_access_tokens_session_fk.sql new file mode 100644 index 00000000..880488b5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000000_idx_compat_access_tokens_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. 
+-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_access_tokens_session_fk + ON compat_access_tokens (compat_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000001_idx_compat_refresh_tokens_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000001_idx_compat_refresh_tokens_session_fk.sql new file mode 100644 index 00000000..806a9f1e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000001_idx_compat_refresh_tokens_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_refresh_tokens_session_fk + ON compat_refresh_tokens (compat_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000002_idx_compat_refresh_tokens_access_token_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000002_idx_compat_refresh_tokens_access_token_fk.sql new file mode 100644 index 00000000..399f3731 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000002_idx_compat_refresh_tokens_access_token_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_refresh_tokens_access_token_fk + ON compat_refresh_tokens (compat_access_token_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000003_idx_compat_sessions_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000003_idx_compat_sessions_user_fk.sql new file mode 100644 index 00000000..1b038c53 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000003_idx_compat_sessions_user_fk.sql @@ -0,0 +1,13 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Including the `last_active_at` column lets us efficiently filter in-memory +-- for those sessions without fetching the rows, and without including it in the +-- index btree +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_sessions_user_fk + ON compat_sessions (user_id) + INCLUDE (last_active_at); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000004_idx_compat_sessions_user_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000004_idx_compat_sessions_user_session_fk.sql new file mode 100644 index 00000000..52633ad0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000004_idx_compat_sessions_user_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_sessions_user_session_fk + ON compat_sessions (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000005_drop_compat_sessions_user_id_last_active_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000005_drop_compat_sessions_user_id_last_active_at.sql new file mode 100644 index 00000000..4c37289f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000005_drop_compat_sessions_user_id_last_active_at.sql @@ -0,0 +1,8 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Redundant with the `compat_sessions_user_fk` +DROP INDEX IF EXISTS compat_sessions_user_id_last_active_at; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000006_idx_compat_sso_logins_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000006_idx_compat_sso_logins_session_fk.sql new file mode 100644 index 00000000..da209e58 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000006_idx_compat_sso_logins_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + compat_sso_logins_session_fk + ON compat_sso_logins (compat_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000007_idx_oauth2_access_tokens_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000007_idx_oauth2_access_tokens_session_fk.sql new file mode 100644 index 00000000..fd44fb6c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000007_idx_oauth2_access_tokens_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_access_tokens_session_fk + ON oauth2_access_tokens (oauth2_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000008_idx_oauth2_authorization_grants_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000008_idx_oauth2_authorization_grants_session_fk.sql new file mode 100644 index 00000000..4a32010f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000008_idx_oauth2_authorization_grants_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_authorization_grants_session_fk + ON oauth2_authorization_grants (oauth2_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000009_idx_oauth2_authorization_grants_client_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000009_idx_oauth2_authorization_grants_client_fk.sql new file mode 100644 index 00000000..945d751d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000009_idx_oauth2_authorization_grants_client_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_authorization_grants_client_fk + ON oauth2_authorization_grants (oauth2_client_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000010_idx_oauth2_consents_client_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000010_idx_oauth2_consents_client_fk.sql new file mode 100644 index 00000000..b0928ef5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000010_idx_oauth2_consents_client_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_consents_client_fk + ON oauth2_consents (oauth2_client_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000011_idx_oauth2_consents_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000011_idx_oauth2_consents_user_fk.sql new file mode 100644 index 00000000..89751503 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000011_idx_oauth2_consents_user_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_consents_user_fk + ON oauth2_consents (user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000012_idx_oauth2_device_code_grants_client_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000012_idx_oauth2_device_code_grants_client_fk.sql new file mode 100644 index 00000000..3f97117f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000012_idx_oauth2_device_code_grants_client_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_device_code_grants_client_fk + ON oauth2_device_code_grant (oauth2_client_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000013_idx_oauth2_device_code_grants_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000013_idx_oauth2_device_code_grants_session_fk.sql new file mode 100644 index 00000000..7400dcd5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000013_idx_oauth2_device_code_grants_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_device_code_grants_session_fk + ON oauth2_device_code_grant (oauth2_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000014_idx_oauth2_device_code_grants_user_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000014_idx_oauth2_device_code_grants_user_session_fk.sql new file mode 100644 index 00000000..8b07a436 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000014_idx_oauth2_device_code_grants_user_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_device_code_grants_user_session_fk + ON oauth2_device_code_grant (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000015_idx_oauth2_refresh_tokens_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000015_idx_oauth2_refresh_tokens_session_fk.sql new file mode 100644 index 00000000..7da89626 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000015_idx_oauth2_refresh_tokens_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_refresh_tokens_session_fk + ON oauth2_refresh_tokens (oauth2_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000016_idx_oauth2_refresh_tokens_access_token_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000016_idx_oauth2_refresh_tokens_access_token_fk.sql new file mode 100644 index 00000000..f6059d22 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000016_idx_oauth2_refresh_tokens_access_token_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_refresh_tokens_access_token_fk + ON oauth2_refresh_tokens (oauth2_access_token_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000017_idx_oauth2_refresh_tokens_next_refresh_token_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000017_idx_oauth2_refresh_tokens_next_refresh_token_fk.sql new file mode 100644 index 00000000..40fd117c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000017_idx_oauth2_refresh_tokens_next_refresh_token_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_refresh_tokens_next_refresh_token_fk + ON oauth2_refresh_tokens (next_oauth2_refresh_token_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000018_idx_oauth2_sessions_user_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000018_idx_oauth2_sessions_user_session_fk.sql new file mode 100644 index 00000000..b2639e71 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000018_idx_oauth2_sessions_user_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_sessions_user_session_fk + ON oauth2_sessions (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000019_idx_oauth2_sessions_client_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000019_idx_oauth2_sessions_client_fk.sql new file mode 100644 index 00000000..341d2dea --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000019_idx_oauth2_sessions_client_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_sessions_client_fk + ON oauth2_sessions (oauth2_client_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000020_idx_oauth2_sessions_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000020_idx_oauth2_sessions_user_fk.sql new file mode 100644 index 00000000..d7b5f52a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000020_idx_oauth2_sessions_user_fk.sql @@ -0,0 +1,13 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Including the `last_active_at` column lets us efficiently filter in-memory +-- for those sessions without fetching the rows, and without including it in the +-- index btree +CREATE INDEX CONCURRENTLY IF NOT EXISTS + oauth2_sessions_user_fk + ON oauth2_sessions (user_id) + INCLUDE (last_active_at); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000021_drop_oauth2_sessions_user_id_last_active_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000021_drop_oauth2_sessions_user_id_last_active_at.sql new file mode 100644 index 00000000..108dc290 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000021_drop_oauth2_sessions_user_id_last_active_at.sql @@ -0,0 +1,8 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Redundant with the `oauth2_sessions_user_fk` +DROP INDEX IF EXISTS oauth2_sessions_user_id_last_active_at; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000022_idx_queue_jobs_started_by_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000022_idx_queue_jobs_started_by_fk.sql new file mode 100644 index 00000000..27278533 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000022_idx_queue_jobs_started_by_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + queue_jobs_started_by_fk + ON queue_jobs (started_by); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000023_idx_queue_jobs_next_attempt_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000023_idx_queue_jobs_next_attempt_fk.sql new file mode 100644 index 00000000..9824557e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000023_idx_queue_jobs_next_attempt_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + queue_jobs_next_attempt_fk + ON queue_jobs (next_attempt_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000024_idx_queue_jobs_schedule_name_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000024_idx_queue_jobs_schedule_name_fk.sql new file mode 100644 index 00000000..4c70fea5 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000024_idx_queue_jobs_schedule_name_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + queue_jobs_schedule_name_fk + ON queue_jobs (schedule_name); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000025_idx_upstream_oauth_authorization_sessions_provider_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000025_idx_upstream_oauth_authorization_sessions_provider_fk.sql new file mode 100644 index 00000000..b86b1e0e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000025_idx_upstream_oauth_authorization_sessions_provider_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_provider_fk + ON upstream_oauth_authorization_sessions (upstream_oauth_provider_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000026_idx_upstream_oauth_authorization_sessions_link_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000026_idx_upstream_oauth_authorization_sessions_link_fk.sql new file mode 100644 index 00000000..1b296090 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000026_idx_upstream_oauth_authorization_sessions_link_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_link_fk + ON upstream_oauth_authorization_sessions (upstream_oauth_link_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000027_idx_upstream_oauth_links_provider_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000027_idx_upstream_oauth_links_provider_fk.sql new file mode 100644 index 00000000..55dd2261 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000027_idx_upstream_oauth_links_provider_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_links_provider_fk + ON upstream_oauth_links (upstream_oauth_provider_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000028_idx_upstream_oauth_links_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000028_idx_upstream_oauth_links_user_fk.sql new file mode 100644 index 00000000..21ba23e4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000028_idx_upstream_oauth_links_user_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_links_user_fk + ON upstream_oauth_links (user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000029_idx_user_email_authentication_codes_authentication_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000029_idx_user_email_authentication_codes_authentication_fk.sql new file mode 100644 index 00000000..59140963 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000029_idx_user_email_authentication_codes_authentication_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_email_authentication_codes_authentication_fk + ON user_email_authentication_codes (user_email_authentication_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000030_idx_user_email_authentications_user_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000030_idx_user_email_authentications_user_session_fk.sql new file mode 100644 index 00000000..bd1558b7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000030_idx_user_email_authentications_user_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_email_authentications_user_session_fk + ON user_email_authentications (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000031_idx_user_email_authentications_user_registration_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000031_idx_user_email_authentications_user_registration_fk.sql new file mode 100644 index 00000000..7f233f7c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000031_idx_user_email_authentications_user_registration_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_email_authentications_user_registration_fk + ON user_email_authentications (user_registration_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000032_idx_user_emails_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000032_idx_user_emails_user_fk.sql new file mode 100644 index 00000000..c4834c71 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000032_idx_user_emails_user_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_emails_user_fk + ON user_emails (user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000033_idx_user_emails_email_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000033_idx_user_emails_email_idx.sql new file mode 100644 index 00000000..b0f2ec4a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000033_idx_user_emails_email_idx.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- This isn't a foreign key, but we really need that to be indexed +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_emails_email_idx + ON user_emails (email); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000034_idx_user_passwords_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000034_idx_user_passwords_user_fk.sql new file mode 100644 index 00000000..334fb878 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000034_idx_user_passwords_user_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_passwords_user_fk + ON user_passwords (user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000035_idx_user_recovery_tickets_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000035_idx_user_recovery_tickets_session_fk.sql new file mode 100644 index 00000000..6cda3719 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000035_idx_user_recovery_tickets_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_recovery_tickets_session_fk + ON user_recovery_tickets (user_recovery_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000036_idx_user_recovery_tickets_user_email_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000036_idx_user_recovery_tickets_user_email_fk.sql new file mode 100644 index 00000000..e6561ade --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000036_idx_user_recovery_tickets_user_email_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_recovery_tickets_user_email_fk + ON user_recovery_tickets (user_email_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000037_idx_user_registrations_email_authentication_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000037_idx_user_registrations_email_authentication_fk.sql new file mode 100644 index 00000000..95a9ec12 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000037_idx_user_registrations_email_authentication_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_registrations_email_authentication_fk + ON user_registrations (email_authentication_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000038_idx_user_session_authentications_user_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000038_idx_user_session_authentications_user_session_fk.sql new file mode 100644 index 00000000..ee78dbd7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000038_idx_user_session_authentications_user_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_session_authentications_user_session_fk + ON user_session_authentications (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000039_idx_user_session_authentications_user_password_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000039_idx_user_session_authentications_user_password_fk.sql new file mode 100644 index 00000000..450a0672 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000039_idx_user_session_authentications_user_password_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_session_authentications_user_password_fk + ON user_session_authentications (user_password_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000040_idx_user_session_authentications_upstream_oauth_session_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000040_idx_user_session_authentications_upstream_oauth_session_fk.sql new file mode 100644 index 00000000..c021595d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000040_idx_user_session_authentications_upstream_oauth_session_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_session_authentications_upstream_oauth_session_fk + ON user_session_authentications (upstream_oauth_authorization_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000041_idx_user_sessions_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000041_idx_user_sessions_user_fk.sql new file mode 100644 index 00000000..443216bd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000041_idx_user_sessions_user_fk.sql @@ -0,0 +1,13 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Including the `last_active_at` column lets us efficiently filter in-memory +-- for those sessions without fetching the rows, and without including it in the +-- index btree +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_sessions_user_fk + ON user_sessions (user_id) + INCLUDE (last_active_at); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000042_drop_user_sessions_user_id_last_active_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000042_drop_user_sessions_user_id_last_active_at.sql new file mode 100644 index 00000000..1c95573c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000042_drop_user_sessions_user_id_last_active_at.sql @@ -0,0 +1,8 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details.
+ +-- Redundant with the `user_sessions_user_fk` +DROP INDEX IF EXISTS user_sessions_user_id_last_active_at; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000043_idx_user_terms_user_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000043_idx_user_terms_user_fk.sql new file mode 100644 index 00000000..f885b557 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000043_idx_user_terms_user_fk.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_terms_user_fk + ON user_terms (user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000044_idx_users_primary_email_fk.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000044_idx_users_primary_email_fk.sql new file mode 100644 index 00000000..0a6318ef --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000044_idx_users_primary_email_fk.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- We don't use this column anymore, but… it will still tank the performance on +-- deletions of user_emails if we don't have it +CREATE INDEX CONCURRENTLY IF NOT EXISTS + users_primary_email_fk + ON users (primary_user_email_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410000045_idx_user_recovery_tickets_ticket_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410000045_idx_user_recovery_tickets_ticket_idx.sql new file mode 100644 index 00000000..648b42fa --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410000045_idx_user_recovery_tickets_ticket_idx.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- This isn't a foreign key, but we really need that to be indexed +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_recovery_tickets_ticket_idx + ON user_recovery_tickets (ticket); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410121612_users_lower_username_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410121612_users_lower_username_idx.sql new file mode 100644 index 00000000..0225f6a7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410121612_users_lower_username_idx.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Create an index on the username column, lower-cased, so that we can lookup +-- usernames in a case-insensitive manner. 
+CREATE INDEX CONCURRENTLY IF NOT EXISTS users_lower_username_idx + ON users (LOWER(username)); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250410174306_oauth2_authorization_default_requires_consent.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250410174306_oauth2_authorization_default_requires_consent.sql new file mode 100644 index 00000000..05960c32 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250410174306_oauth2_authorization_default_requires_consent.sql @@ -0,0 +1,9 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- We stopped reading/writing to this column, but it's not nullable. +-- So we need to add a default value, and drop it in the next release +ALTER TABLE oauth2_authorization_grants + ALTER COLUMN requires_consent SET DEFAULT false; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250424150930_oauth2_grants_locale.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250424150930_oauth2_grants_locale.sql new file mode 100644 index 00000000..699f70cf --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250424150930_oauth2_grants_locale.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Track the locale of the user which asked for the authorization grant +ALTER TABLE oauth2_authorization_grants + ADD COLUMN locale TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250425113717_oauth2_session_human_name.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250425113717_oauth2_session_human_name.sql new file mode 100644 index 00000000..82a07c6d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250425113717_oauth2_session_human_name.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Add a user-provided human name to OAuth 2.0 sessions +ALTER TABLE oauth2_sessions + ADD COLUMN human_name TEXT; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250506161158_upstream_oauth2_forward_login_hint.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250506161158_upstream_oauth2_forward_login_hint.sql new file mode 100644 index 00000000..2aa29a82 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250506161158_upstream_oauth2_forward_login_hint.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Add the forward_login_hint column to the upstream_oauth_providers table +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "forward_login_hint" BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250507131948_upstream_oauth_session_optional_nonce.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250507131948_upstream_oauth_session_optional_nonce.sql new file mode 100644 index 00000000..1b637c91 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250507131948_upstream_oauth_session_optional_nonce.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +-- Make the nonce column optional on the upstream oauth sessions +ALTER TABLE "upstream_oauth_authorization_sessions" + ALTER COLUMN "nonce" DROP NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250602212100_user_registration_tokens.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250602212100_user_registration_tokens.sql new file mode 100644 index 00000000..2f9ec3cd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250602212100_user_registration_tokens.sql @@ -0,0 +1,57 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. 
+ +-- Add a table for storing user registration tokens +CREATE TABLE "user_registration_tokens" ( + "user_registration_token_id" UUID PRIMARY KEY, + + -- The token string that users need to provide during registration + "token" TEXT NOT NULL UNIQUE, + + -- Optional limit on how many times this token can be used + "usage_limit" INTEGER, + + -- How many times this token has been used + "times_used" INTEGER NOT NULL DEFAULT 0, + + -- When the token was created + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL, + + -- When the token was last used + "last_used_at" TIMESTAMP WITH TIME ZONE, + + -- Optional expiration time for the token + "expires_at" TIMESTAMP WITH TIME ZONE, + + -- When the token was revoked + "revoked_at" TIMESTAMP WITH TIME ZONE +); + +-- Create a few indices on the table, as we use those for filtering +-- They are safe to create non-concurrently, as the table is empty at this point +CREATE INDEX "user_registration_tokens_usage_limit_idx" + ON "user_registration_tokens" ("usage_limit"); + +CREATE INDEX "user_registration_tokens_times_used_idx" + ON "user_registration_tokens" ("times_used"); + +CREATE INDEX "user_registration_tokens_created_at_idx" + ON "user_registration_tokens" ("created_at"); + +CREATE INDEX "user_registration_tokens_last_used_at_idx" + ON "user_registration_tokens" ("last_used_at"); + +CREATE INDEX "user_registration_tokens_expires_at_idx" + ON "user_registration_tokens" ("expires_at"); + +CREATE INDEX "user_registration_tokens_revoked_at_idx" + ON "user_registration_tokens" ("revoked_at"); + +-- Add foreign key reference to registration tokens in user registrations +-- A second migration will add the index for this foreign key +ALTER TABLE "user_registrations" + ADD COLUMN "user_registration_token_id" UUID + REFERENCES "user_registration_tokens" ("user_registration_token_id") + ON DELETE SET NULL; \ No newline at end of file diff --git 
a/matrix-authentication-service/crates/storage-pg/migrations/20250602212101_idx_user_registration_token.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250602212101_idx_user_registration_token.sql new file mode 100644 index 00000000..957b453b --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250602212101_idx_user_registration_token.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE in the repository root for full details. + +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_registrations_user_registration_token_id_fk + ON user_registrations (user_registration_token_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql new file mode 100644 index 00000000..6cb78a4c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- This is the decoded claims from the ID token stored as JSONB +ALTER TABLE upstream_oauth_authorization_sessions + ADD COLUMN id_token_claims JSONB; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql new file mode 100644 index 00000000..32702216 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql @@ -0,0 +1,15 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We'll be requesting authorization sessions by provider, sub and sid, so we'll +-- need to index those columns +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_sub_sid_idx + ON upstream_oauth_authorization_sessions ( + upstream_oauth_provider_id, + (id_token_claims->>'sub'), + (id_token_claims->>'sid') + ); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql new file mode 100644 index 00000000..097c3da3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql @@ -0,0 +1,15 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- We'll be requesting authorization sessions by provider, sub and sid, so we'll +-- need to index those columns +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_sid_sub_idx + ON upstream_oauth_authorization_sessions ( + upstream_oauth_provider_id, + (id_token_claims->>'sid'), + (id_token_claims->>'sub') + ); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql new file mode 100644 index 00000000..f6031ca6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql @@ -0,0 +1,10 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- This defines the behavior when receiving a backchannel logout notification +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "on_backchannel_logout" TEXT + NOT NULL + DEFAULT 'do_nothing'; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250708155857_idx_user_emails_lower_email.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250708155857_idx_user_emails_lower_email.sql new file mode 100644 index 00000000..1b4f9afe --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250708155857_idx_user_emails_lower_email.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- When we're looking up an email address, we want to be able to do a case-insensitive +-- lookup, so we index the email address lowercase and request it like that +CREATE INDEX CONCURRENTLY IF NOT EXISTS + user_emails_lower_email_idx + ON user_emails (LOWER(email)); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250709142230_id_token_claims_trigger.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250709142230_id_token_claims_trigger.sql new file mode 100644 index 00000000..32d30472 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250709142230_id_token_claims_trigger.sql @@ -0,0 +1,51 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We may be running an older version of the app that doesn't fill in the +-- id_token_claims column when the id_token column is populated. So we add a +-- trigger to fill in the id_token_claims column if it's NULL. +-- +-- We will be able to remove this trigger in a future version of the app. +-- +-- We backfill in a second migration after this one to make sure we don't miss +-- any rows, and don't lock the table for too long. 
+CREATE OR REPLACE FUNCTION fill_id_token_claims() +RETURNS TRIGGER AS $$ +BEGIN + -- Only process if id_token_claims is NULL but id_token is not NULL + IF NEW.id_token_claims IS NULL AND NEW.id_token IS NOT NULL AND NEW.id_token != '' THEN + BEGIN + -- Decode JWT payload inline + NEW.id_token_claims := ( + CASE + WHEN split_part(NEW.id_token, '.', 2) = '' THEN NULL + ELSE + (convert_from( + decode( + replace(replace(split_part(NEW.id_token, '.', 2), '-', '+'), '_', '/') || + repeat('=', (4 - length(split_part(NEW.id_token, '.', 2)) % 4) % 4), + 'base64' + ), + 'UTF8' + ))::JSONB + END + ); + EXCEPTION + WHEN OTHERS THEN + -- If JWT decoding fails, leave id_token_claims as NULL + NEW.id_token_claims := NULL; + END; + END IF; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- Create the trigger +CREATE TRIGGER trg_fill_id_token_claims + BEFORE INSERT OR UPDATE ON upstream_oauth_authorization_sessions + FOR EACH ROW + WHEN (NEW.id_token_claims IS NULL AND NEW.id_token IS NOT NULL AND NEW.id_token <> '') + EXECUTE FUNCTION fill_id_token_claims(); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250709142240_backfill_id_token_claims.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250709142240_backfill_id_token_claims.sql new file mode 100644 index 00000000..c2fa067a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250709142240_backfill_id_token_claims.sql @@ -0,0 +1,22 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- This backfills the id_token_claims column in the upstream_oauth_authorization_sessions table +-- by decoding the id_token column and storing the decoded claims in the id_token_claims column. 
+UPDATE upstream_oauth_authorization_sessions +SET id_token_claims = CASE + WHEN id_token IS NULL OR id_token = '' THEN NULL + WHEN split_part(id_token, '.', 2) = '' THEN NULL + ELSE + (convert_from( + decode( + replace(replace(split_part(id_token, '.', 2), '-', '+'), '_', '/') || + repeat('=', (4 - length(split_part(id_token, '.', 2)) % 4) % 4), + 'base64' + ), + 'UTF8' + ))::JSONB +END +WHERE id_token IS NOT NULL AND id_token_claims IS NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250915092000_pgtrgm_extension.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250915092000_pgtrgm_extension.sql new file mode 100644 index 00000000..2ebc26d2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250915092000_pgtrgm_extension.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- This enables the pg_trgm extension, which is used for search filters +-- Starting Postgres 16, this extension is marked as "trusted", meaning it can be +-- installed by non-superusers +CREATE EXTENSION IF NOT EXISTS pg_trgm; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250915092635_users_username_trgm_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250915092635_users_username_trgm_idx.sql new file mode 100644 index 00000000..3e2cd4dc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250915092635_users_username_trgm_idx.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details.
+ +-- This adds an index on the username field for ILIKE '%search%' operations, +-- enabling fuzzy searches of usernames +CREATE INDEX CONCURRENTLY IF NOT EXISTS users_username_trgm_idx + ON users USING gin(username gin_trgm_ops); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20250924132713_personal_access_tokens.sql b/matrix-authentication-service/crates/storage-pg/migrations/20250924132713_personal_access_tokens.sql new file mode 100644 index 00000000..0e113b15 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20250924132713_personal_access_tokens.sql @@ -0,0 +1,68 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- A family of personal access tokens. This is a long-lived wrapper around the personal access tokens +-- themselves, allowing tokens to be regenerated whilst still retaining a persistent identifier for them. +CREATE TABLE personal_sessions ( + personal_session_id UUID NOT NULL PRIMARY KEY, + + -- If this session is owned by a user, the ID of the user. + -- Null otherwise. + owner_user_id UUID REFERENCES users(user_id), + + -- If this session is owned by an OAuth 2 Client (via Client Credentials grant), + -- the ID of the owning client. + -- Null otherwise. + owner_oauth2_client_id UUID REFERENCES oauth2_clients(oauth2_client_id), + + actor_user_id UUID NOT NULL REFERENCES users(user_id), + -- A human-readable label, intended to describe what the session is for. + human_name TEXT NOT NULL, + -- The OAuth2 scopes for the session, identical to OAuth2 sessions. + -- May include a device ID, but this is optional (sessions can be deviceless). + scope_list TEXT[] NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + -- If set, none of the tokens will be valid anymore. 
+ revoked_at TIMESTAMP WITH TIME ZONE, + last_active_at TIMESTAMP WITH TIME ZONE, + last_active_ip INET, + + -- There must be exactly one owner. + CONSTRAINT personal_sessions_exactly_one_owner CHECK ((owner_user_id IS NULL) <> (owner_oauth2_client_id IS NULL)) +); + +-- Individual tokens. +CREATE TABLE personal_access_tokens ( + personal_access_token_id UUID NOT NULL PRIMARY KEY, + -- The session this access token belongs to. + personal_session_id UUID NOT NULL REFERENCES personal_sessions(personal_session_id), + -- SHA256 of the access token. + -- This is a lightweight measure to stop a database backup (or other + -- unauthorised read-only database access) escalating into real permissions + -- on a live system. + -- We could have used a hash with secret key, but this would no longer be + -- 'free' protection because it would need configuration (and introduce + -- potential issues with configuring it wrong). + -- This is currently inconsistent with other access token tables but it would + -- make sense to migrate those to match in the future. + access_token_sha256 BYTEA NOT NULL UNIQUE + -- A SHA256 hash is 32 bytes long + CHECK (octet_length(access_token_sha256) = 32), + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + -- If set, the token won't be valid after this time. + -- If not set, the token never automatically expires. + expires_at TIMESTAMP WITH TIME ZONE, + -- If set, this token is not valid anymore. + revoked_at TIMESTAMP WITH TIME ZONE +); + +-- Ensure we can only have one active personal access token in each family. 
+CREATE UNIQUE INDEX ON personal_access_tokens (personal_session_id) WHERE revoked_at IS NOT NULL; + +-- Add indices to satisfy foreign key backward checks +-- (and likely filter queries) +CREATE INDEX ON personal_sessions (owner_user_id) WHERE owner_user_id IS NOT NULL; +CREATE INDEX ON personal_sessions (owner_oauth2_client_id) WHERE owner_oauth2_client_id IS NOT NULL; +CREATE INDEX ON personal_sessions (actor_user_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20251023134634_personal_access_tokens_unique_fix.sql b/matrix-authentication-service/crates/storage-pg/migrations/20251023134634_personal_access_tokens_unique_fix.sql new file mode 100644 index 00000000..9274d16a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20251023134634_personal_access_tokens_unique_fix.sql @@ -0,0 +1,14 @@ +-- Copyright 2025 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + + +-- Fix a faulty constraint. +-- The condition was incorrectly specified as `revoked_at IS NOT NULL` +-- when `revoked_at IS NULL` was meant. + +DROP INDEX personal_access_tokens_personal_session_id_idx; + +-- Ensure we can only have one active personal access token in each family. +CREATE UNIQUE INDEX ON personal_access_tokens (personal_session_id) WHERE revoked_at IS NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20251121145458_user_registration_upstream_oauth_session.sql b/matrix-authentication-service/crates/storage-pg/migrations/20251121145458_user_registration_upstream_oauth_session.sql new file mode 100644 index 00000000..4717dee1 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20251121145458_user_registration_upstream_oauth_session.sql @@ -0,0 +1,10 @@ +-- Copyright 2025 Element Creations Ltd. 
+-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- Track what upstream OAuth session to associate during user registration +ALTER TABLE user_registrations + ADD COLUMN upstream_oauth_authorization_session_id UUID + REFERENCES upstream_oauth_authorization_sessions (upstream_oauth_authorization_session_id) + ON DELETE SET NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20251127145951_user_registration_upstream_oauth_session_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20251127145951_user_registration_upstream_oauth_session_idx.sql new file mode 100644 index 00000000..665a2ff1 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20251127145951_user_registration_upstream_oauth_session_idx.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- Index on the new foreign key added by the previous migration +CREATE INDEX CONCURRENTLY IF NOT EXISTS user_registrations_upstream_oauth_session_id_idx + ON user_registrations (upstream_oauth_authorization_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108111542_remove_apalis.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108111542_remove_apalis.sql new file mode 100644 index 00000000..e6e90fb4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108111542_remove_apalis.sql @@ -0,0 +1,14 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We replaced apalis a while back but did not clean the database. 
This removes +-- everything related to apalis +DROP TRIGGER IF EXISTS notify_workers ON apalis.jobs; +DROP FUNCTION IF EXISTS apalis.notify_new_jobs(); +DROP FUNCTION IF EXISTS apalis.get_jobs(text, text, integer); +DROP FUNCTION IF EXISTS apalis.push_job(text, json, text, timestamp with time zone, integer); +DROP TABLE IF EXISTS apalis.jobs; +DROP TABLE IF EXISTS apalis.workers; +DROP SCHEMA IF EXISTS apalis; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108120030_remove_user_emails_old_confirmation.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108120030_remove_user_emails_old_confirmation.sql new file mode 100644 index 00000000..f5ba7a02 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108120030_remove_user_emails_old_confirmation.sql @@ -0,0 +1,18 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We reworked how email verification works in +-- https://github.com/element-hq/matrix-authentication-service/pull/3784 +-- but kept some old schema around to allow rolling back. 
We're safe to drop +-- those now + +-- Users don't have a 'primary email' anymore +ALTER TABLE users DROP COLUMN primary_user_email_id; + +-- Replaced by user_email_authentications +DROP TABLE user_email_confirmation_codes; + +-- User emails are always confirmed when they are in this table now +ALTER TABLE user_emails DROP COLUMN confirmed_at; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108121127_cleanup_oauth2_consents.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108121127_cleanup_oauth2_consents.sql new file mode 100644 index 00000000..ec3ea808 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108121127_cleanup_oauth2_consents.sql @@ -0,0 +1,18 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We've removed the idea of conditional consent (just go through the login if +-- we already consented in the past) but didn't do the cleanup in +-- https://github.com/element-hq/matrix-authentication-service/pull/4386 + +-- In this version we completely stopped writing to this table, so that it's +-- safe to completely drop in the next version +TRUNCATE TABLE oauth2_consents; + +-- We stopped reading and writing in those columns a long time ago, so it's fine +-- to drop them now +ALTER TABLE oauth2_authorization_grants + DROP COLUMN max_age, + DROP COLUMN requires_consent; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108121952_cleanup_id_token_claims_trigger.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108121952_cleanup_id_token_claims_trigger.sql new file mode 100644 index 00000000..f6cb6a7a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108121952_cleanup_id_token_claims_trigger.sql @@ -0,0 +1,11 @@ +-- Copyright 2026 Element Creations Ltd. 
+--
+-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+-- Please see LICENSE in the repository root for full details.
+
+-- When we introduced an id_token_claims column on upstream OAuth 2.0 logins, we
+-- added a trigger to make sure that when rolling back the new columns get
+-- automatically filled correctly. It's been a while, it's safe to remove them.
+-- https://github.com/element-hq/matrix-authentication-service/pull/4743
+DROP TRIGGER IF EXISTS trg_fill_id_token_claims ON upstream_oauth_authorization_sessions;
+DROP FUNCTION IF EXISTS fill_id_token_claims();
diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108144040_remove_deactivated_unsupported_threepids.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108144040_remove_deactivated_unsupported_threepids.sql
new file mode 100644
index 00000000..5c57a734
--- /dev/null
+++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108144040_remove_deactivated_unsupported_threepids.sql
@@ -0,0 +1,13 @@
+-- Copyright 2026 Element Creations Ltd.
+--
+-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+-- Please see LICENSE in the repository root for full details.
+
+-- This removes unsupported threepids from deactivated users, as we forgot to do
+-- it in the past. It's fine to do this in a migration as no one is reading from
+-- this table and it is quite small.
Follows up from: +-- https://github.com/element-hq/matrix-authentication-service/pull/5406 +DELETE FROM user_unsupported_third_party_ids +USING users +WHERE users.deactivated_at IS NOT NULL + AND users.user_id = user_unsupported_third_party_ids.user_id; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108145240_drop_oauth2_consents.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108145240_drop_oauth2_consents.sql new file mode 100644 index 00000000..c8fb86e9 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108145240_drop_oauth2_consents.sql @@ -0,0 +1,9 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We've stopped writing to this table in the following PR: +-- https://github.com/element-hq/matrix-authentication-service/pull/5405 +-- This migration should be released in the version after that for safe rollout +DROP TABLE IF EXISTS oauth2_consents; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260108175627_oauth_access_tokens_revoked_at_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260108175627_oauth_access_tokens_revoked_at_idx.sql new file mode 100644 index 00000000..aa119ec2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260108175627_oauth_access_tokens_revoked_at_idx.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- This adds an index on the revoked_at field on oauth2_access_tokens to speed up cleaning them up +CREATE INDEX CONCURRENTLY IF NOT EXISTS oauth_access_tokens_revoked_at_idx + ON oauth2_access_tokens (revoked_at) WHERE revoked_at IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260109115009_oauth_access_tokens_expires_at_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260109115009_oauth_access_tokens_expires_at_idx.sql new file mode 100644 index 00000000..f3e983ef --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260109115009_oauth_access_tokens_expires_at_idx.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- This adds an index on the expires_at field on oauth2_access_tokens to speed up cleaning them up +CREATE INDEX CONCURRENTLY IF NOT EXISTS oauth_access_tokens_expires_at_idx + ON oauth2_access_tokens (expires_at) WHERE expires_at IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260109172537_oauth_refresh_token_revoked_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260109172537_oauth_refresh_token_revoked_at.sql new file mode 100644 index 00000000..6b982e83 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260109172537_oauth_refresh_token_revoked_at.sql @@ -0,0 +1,9 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- This adds an index on the revoked_at field on oauth2_refresh_tokens to speed up cleaning them up +CREATE INDEX CONCURRENTLY IF NOT EXISTS oauth_refresh_tokens_revoked_at_idx + ON oauth2_refresh_tokens (revoked_at) WHERE revoked_at IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260109172950_oauth_refresh_token_next_token_set_null.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260109172950_oauth_refresh_token_next_token_set_null.sql new file mode 100644 index 00000000..734329d3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260109172950_oauth_refresh_token_next_token_set_null.sql @@ -0,0 +1,15 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- Replace the foreign key constraint on the next refresh token to set the field +-- to NULL on delete. We re-introduce the constraint as NOT VALID to avoid +-- locking the table, and a second migration validates the constraint +ALTER TABLE oauth2_refresh_tokens + DROP CONSTRAINT IF EXISTS oauth2_refresh_tokens_next_oauth2_refresh_token_id_fkey, + ADD CONSTRAINT oauth2_refresh_tokens_next_oauth2_refresh_token_id_fkey + FOREIGN KEY (next_oauth2_refresh_token_id) + REFERENCES oauth2_refresh_tokens (oauth2_refresh_token_id) + ON DELETE SET NULL + NOT VALID; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260109172954_oauth_refresh_token_next_token_set_null_validate.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260109172954_oauth_refresh_token_next_token_set_null_validate.sql new file mode 100644 index 00000000..38b98228 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260109172954_oauth_refresh_token_next_token_set_null_validate.sql @@ -0,0 +1,9 @@ +-- Copyright 2026 Element Creations Ltd. 
+-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- Validate the foreign key constraint on the next refresh token introduced in +-- the previous migration +ALTER TABLE oauth2_refresh_tokens + VALIDATE CONSTRAINT oauth2_refresh_tokens_next_oauth2_refresh_token_id_fkey; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260112094550_oauth_refresh_token_not_consumed_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260112094550_oauth_refresh_token_not_consumed_idx.sql new file mode 100644 index 00000000..6d44b465 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260112094550_oauth_refresh_token_not_consumed_idx.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- Adds a partial index on oauth2_refresh_tokens that are consumed +-- to speed up cleaning up of consumed tokens +CREATE INDEX CONCURRENTLY IF NOT EXISTS oauth_refresh_token_not_consumed_idx + ON oauth2_refresh_tokens (oauth2_refresh_token_id) + WHERE consumed_at IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260112094837_oauth_refresh_token_consumed_at_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260112094837_oauth_refresh_token_consumed_at_idx.sql new file mode 100644 index 00000000..ff99e7a6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260112094837_oauth_refresh_token_consumed_at_idx.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- Adds a partial index on oauth2_refresh_tokens on the consumed_at field, +-- including other interesting fields to speed up cleaning up of consumed tokens +CREATE INDEX CONCURRENTLY IF NOT EXISTS oauth_refresh_token_consumed_at_idx + ON oauth2_refresh_tokens (consumed_at, next_oauth2_refresh_token_id, oauth2_refresh_token_id) + WHERE consumed_at IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260115111313_idx_compat_sessions_finished_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260115111313_idx_compat_sessions_finished_at.sql new file mode 100644 index 00000000..8e5acb2c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260115111313_idx_compat_sessions_finished_at.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Index to efficiently query finished compat sessions for cleanup +-- Only includes non-null finished_at values since we filter on finished_at IS NOT NULL +CREATE INDEX CONCURRENTLY IF NOT EXISTS "compat_sessions_finished_at_idx" + ON "compat_sessions" ("finished_at") + WHERE "finished_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260116000002_idx_upstream_oauth_links_orphaned.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260116000002_idx_upstream_oauth_links_orphaned.sql new file mode 100644 index 00000000..97d324a2 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260116000002_idx_upstream_oauth_links_orphaned.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Add partial index for cleanup of orphaned upstream OAuth links +CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_upstream_oauth_links_orphaned + ON upstream_oauth_links (upstream_oauth_link_id) + WHERE user_id IS NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260116000003_queue_jobs_next_attempt_set_null.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260116000003_queue_jobs_next_attempt_set_null.sql new file mode 100644 index 00000000..c9493450 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260116000003_queue_jobs_next_attempt_set_null.sql @@ -0,0 +1,14 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Change the FK constraint on next_attempt_id to SET NULL on delete +-- This allows us to clean up old completed/failed jobs without breaking retry chains +ALTER TABLE queue_jobs + DROP CONSTRAINT queue_jobs_next_attempt_id_fkey, + ADD CONSTRAINT queue_jobs_next_attempt_id_fkey + FOREIGN KEY (next_attempt_id) + REFERENCES queue_jobs (queue_job_id) + ON DELETE SET NULL + NOT VALID; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260116000004_queue_jobs_next_attempt_set_null_validate.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260116000004_queue_jobs_next_attempt_set_null_validate.sql new file mode 100644 index 00000000..b29424a4 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260116000004_queue_jobs_next_attempt_set_null_validate.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Validate the FK constraint that was added in the previous migration +-- This is done in a separate migration to avoid holding locks for too long +ALTER TABLE queue_jobs + VALIDATE CONSTRAINT queue_jobs_next_attempt_id_fkey; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260121103025_upstream_oauth_track_user_session.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260121103025_upstream_oauth_track_user_session.sql new file mode 100644 index 00000000..f71d3280 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260121103025_upstream_oauth_track_user_session.sql @@ -0,0 +1,11 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Start tracking the associated `user_session` directly on the authorization session +-- This will be backfilled in a separate migration rolling in the next version +ALTER TABLE upstream_oauth_authorization_sessions + ADD COLUMN user_session_id UUID + REFERENCES user_sessions (user_session_id) + ON DELETE SET NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260121104214_upstream_auth_user_session_fk_idx.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260121104214_upstream_auth_user_session_fk_idx.sql new file mode 100644 index 00000000..a5d3a232 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260121104214_upstream_auth_user_session_fk_idx.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Adds an index on the user_session_id column on the +-- upstream_oauth_authorization_sessions table +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_user_session_id_idx + ON upstream_oauth_authorization_sessions (user_session_id); diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260121112201_upstream_oauth_sessions_orphan_index.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260121112201_upstream_oauth_sessions_orphan_index.sql new file mode 100644 index 00000000..9efad01d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260121112201_upstream_oauth_sessions_orphan_index.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Add partial index for cleanup of orphaned upstream OAuth sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS upstream_oauth_authorization_sessions_orphaned + ON upstream_oauth_authorization_sessions (upstream_oauth_authorization_session_id) + WHERE user_session_id IS NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260121121140_upstream_oauth_track_user_session_trigger.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260121121140_upstream_oauth_track_user_session_trigger.sql new file mode 100644 index 00000000..122879dc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260121121140_upstream_oauth_track_user_session_trigger.sql @@ -0,0 +1,27 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Adds a trigger which will backfill the user_session_id column when inserting +-- a new user_session_authentications row. 
This is to help support rolling
+-- back to previous releases and should be dropped in a future version.
+CREATE OR REPLACE FUNCTION upstream_oauth_authorization_sessions_insert_trigger()
+RETURNS TRIGGER AS $$
+BEGIN
+    IF NEW.upstream_oauth_authorization_session_id IS NOT NULL THEN
+        UPDATE upstream_oauth_authorization_sessions
+        SET user_session_id = NEW.user_session_id
+        WHERE upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id
+            = NEW.upstream_oauth_authorization_session_id
+        AND upstream_oauth_authorization_sessions.user_session_id IS NULL;
+    END IF;
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Create the trigger
+CREATE TRIGGER upstream_oauth_authorization_sessions_insert_trigger
+    AFTER INSERT ON user_session_authentications
+    FOR EACH ROW
+    EXECUTE FUNCTION upstream_oauth_authorization_sessions_insert_trigger();
diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260121121150_upstream_oauth_track_user_session_backfill.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260121121150_upstream_oauth_track_user_session_backfill.sql
new file mode 100644
index 00000000..c98323e3
--- /dev/null
+++ b/matrix-authentication-service/crates/storage-pg/migrations/20260121121150_upstream_oauth_track_user_session_backfill.sql
@@ -0,0 +1,13 @@
+-- Copyright 2026 Element Creations Ltd.
+ +-- Backfill the upstream_oauth_authorization_sessions.user_session_id column +-- based on session authentications +UPDATE upstream_oauth_authorization_sessions +SET user_session_id = user_session_authentications.user_session_id +FROM user_session_authentications +WHERE upstream_oauth_authorization_sessions.user_session_id IS NULL + AND upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id + = user_session_authentications.upstream_oauth_authorization_session_id; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260122113523_compat_sessions_user_session_no_action.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260122113523_compat_sessions_user_session_no_action.sql new file mode 100644 index 00000000..d86ea229 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260122113523_compat_sessions_user_session_no_action.sql @@ -0,0 +1,19 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Change compat_sessions.user_session_id FK from ON DELETE SET NULL to NO ACTION +-- This ensures user_sessions cannot be deleted while compat_sessions reference them, +-- which is required for backchannel logout propagation to work correctly. +-- +-- Uses NOT VALID to avoid scanning the entire table while holding a lock. +-- A separate migration will validate the constraint. 
+ +ALTER TABLE compat_sessions + DROP CONSTRAINT compat_sessions_user_session_id_fkey, + ADD CONSTRAINT compat_sessions_user_session_id_fkey + FOREIGN KEY (user_session_id) + REFERENCES user_sessions (user_session_id) + ON DELETE NO ACTION + NOT VALID; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260122114353_compat_sessions_user_session_validate.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260122114353_compat_sessions_user_session_validate.sql new file mode 100644 index 00000000..4a5a7f23 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260122114353_compat_sessions_user_session_validate.sql @@ -0,0 +1,9 @@ +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Validate the constraint added in the previous migration. +-- This scans the table but does not hold an exclusive lock. +ALTER TABLE compat_sessions + VALIDATE CONSTRAINT compat_sessions_user_session_id_fkey; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260122123211_idx_oauth2_sessions_finished_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260122123211_idx_oauth2_sessions_finished_at.sql new file mode 100644 index 00000000..e075b345 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260122123211_idx_oauth2_sessions_finished_at.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Adds a partial index on oauth2_sessions.finished_at to help cleaning up +-- finished sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS "oauth2_sessions_finished_at_idx" + ON "oauth2_sessions" ("finished_at") + WHERE "finished_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260122124231_idx_user_sessions_finished_at.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260122124231_idx_user_sessions_finished_at.sql new file mode 100644 index 00000000..5f09a7ad --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260122124231_idx_user_sessions_finished_at.sql @@ -0,0 +1,11 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Adds a partial index on user_sessions.finished_at to help cleaning up +-- finished sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS "user_sessions_finished_at_idx" + ON "user_sessions" ("finished_at") + WHERE "finished_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260123090000_idx_oauth2_sessions_inactive_ips.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260123090000_idx_oauth2_sessions_inactive_ips.sql new file mode 100644 index 00000000..6b26e231 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260123090000_idx_oauth2_sessions_inactive_ips.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Partial index for cleaning up IP addresses from inactive OAuth2 sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS "oauth2_sessions_inactive_ips_idx" + ON "oauth2_sessions" ("last_active_at") + WHERE "last_active_ip" IS NOT NULL AND "last_active_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260123090001_idx_compat_sessions_inactive_ips.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260123090001_idx_compat_sessions_inactive_ips.sql new file mode 100644 index 00000000..01e79bba --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260123090001_idx_compat_sessions_inactive_ips.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. + +-- Partial index for cleaning up IP addresses from inactive compat sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS "compat_sessions_inactive_ips_idx" + ON "compat_sessions" ("last_active_at") + WHERE "last_active_ip" IS NOT NULL AND "last_active_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/migrations/20260123090002_idx_user_sessions_inactive_ips.sql b/matrix-authentication-service/crates/storage-pg/migrations/20260123090002_idx_user_sessions_inactive_ips.sql new file mode 100644 index 00000000..c131fb89 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/migrations/20260123090002_idx_user_sessions_inactive_ips.sql @@ -0,0 +1,10 @@ +-- no-transaction +-- Copyright 2026 Element Creations Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE files in the repository root for full details. 
+ +-- Partial index for cleaning up IP addresses from inactive user sessions +CREATE INDEX CONCURRENTLY IF NOT EXISTS "user_sessions_inactive_ips_idx" + ON "user_sessions" ("last_active_at") + WHERE "last_active_ip" IS NOT NULL AND "last_active_at" IS NOT NULL; diff --git a/matrix-authentication-service/crates/storage-pg/src/app_session.rs b/matrix-authentication-service/crates/storage-pg/src/app_session.rs new file mode 100644 index 00000000..2867534c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/app_session.rs @@ -0,0 +1,786 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing PostgreSQL implementation of repositories for sessions + +use async_trait::async_trait; +use mas_data_model::{ + Clock, CompatSession, CompatSessionState, Device, Session, SessionState, User, +}; +use mas_storage::{ + Page, Pagination, + app_session::{AppSession, AppSessionFilter, AppSessionRepository, AppSessionState}, + compat::CompatSessionFilter, + oauth2::OAuth2SessionFilter, +}; +use oauth2_types::scope::{Scope, ScopeToken}; +use opentelemetry_semantic_conventions::trace::DB_QUERY_TEXT; +use sea_query::{ + Alias, ColumnRef, CommonTableExpression, Expr, PostgresQueryBuilder, Query, UnionType, +}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use tracing::Instrument; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, ExecuteExt, + errors::DatabaseInconsistencyError, + filter::StatementExt, + iden::{CompatSessions, OAuth2Sessions}, + pagination::QueryBuilderExt, +}; + +/// An implementation of [`AppSessionRepository`] for a PostgreSQL connection +pub struct PgAppSessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgAppSessionRepository<'c> { + /// Create a new [`PgAppSessionRepository`] from 
an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +mod priv_ { + // The enum_def macro generates a public enum, which we don't want, because it + // triggers the missing docs warning + + use std::net::IpAddr; + + use chrono::{DateTime, Utc}; + use mas_storage::pagination::Node; + use sea_query::enum_def; + use ulid::Ulid; + use uuid::Uuid; + + #[derive(sqlx::FromRow)] + #[enum_def] + pub(super) struct AppSessionLookup { + pub(super) cursor: Uuid, + pub(super) compat_session_id: Option, + pub(super) oauth2_session_id: Option, + pub(super) oauth2_client_id: Option, + pub(super) user_session_id: Option, + pub(super) user_id: Option, + pub(super) scope_list: Option>, + pub(super) device_id: Option, + pub(super) human_name: Option, + pub(super) created_at: DateTime, + pub(super) finished_at: Option>, + pub(super) is_synapse_admin: Option, + pub(super) user_agent: Option, + pub(super) last_active_at: Option>, + pub(super) last_active_ip: Option, + } + + impl Node for AppSessionLookup { + fn cursor(&self) -> Ulid { + self.cursor.into() + } + } +} + +use priv_::{AppSessionLookup, AppSessionLookupIden}; + +impl TryFrom for AppSession { + type Error = DatabaseError; + + fn try_from(value: AppSessionLookup) -> Result { + // This is annoying to do, but we have to match on all the fields to determine + // whether it's a compat session or an oauth2 session + let AppSessionLookup { + cursor, + compat_session_id, + oauth2_session_id, + oauth2_client_id, + user_session_id, + user_id, + scope_list, + device_id, + human_name, + created_at, + finished_at, + is_synapse_admin, + user_agent, + last_active_at, + last_active_ip, + } = value; + + let user_session_id = user_session_id.map(Ulid::from); + + match ( + compat_session_id, + oauth2_session_id, + oauth2_client_id, + user_id, + scope_list, + device_id, + is_synapse_admin, + ) { + ( + Some(compat_session_id), + None, + None, + Some(user_id), + None, + device_id_opt, + 
Some(is_synapse_admin), + ) => { + let id = compat_session_id.into(); + let device = device_id_opt + .map(Device::try_from) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("compat_sessions") + .column("device_id") + .row(id) + .source(e) + })?; + + let state = match finished_at { + None => CompatSessionState::Valid, + Some(finished_at) => CompatSessionState::Finished { finished_at }, + }; + + let session = CompatSession { + id, + state, + user_id: user_id.into(), + device, + human_name, + user_session_id, + created_at, + is_synapse_admin, + user_agent, + last_active_at, + last_active_ip, + }; + + Ok(AppSession::Compat(Box::new(session))) + } + + ( + None, + Some(oauth2_session_id), + Some(oauth2_client_id), + user_id, + Some(scope_list), + None, + None, + ) => { + let id = oauth2_session_id.into(); + let scope: Result = + scope_list.iter().map(|s| s.parse::()).collect(); + let scope = scope.map_err(|e| { + DatabaseInconsistencyError::on("oauth2_sessions") + .column("scope") + .row(id) + .source(e) + })?; + + let state = match value.finished_at { + None => SessionState::Valid, + Some(finished_at) => SessionState::Finished { finished_at }, + }; + + let session = Session { + id, + state, + created_at, + client_id: oauth2_client_id.into(), + user_id: user_id.map(Ulid::from), + user_session_id, + scope, + user_agent, + last_active_at, + last_active_ip, + human_name, + }; + + Ok(AppSession::OAuth2(Box::new(session))) + } + + _ => Err(DatabaseInconsistencyError::on("sessions") + .row(cursor.into()) + .into()), + } + } +} + +/// Split a [`AppSessionFilter`] into two separate filters: a +/// [`CompatSessionFilter`] and an [`OAuth2SessionFilter`]. 
+fn split_filter( + filter: AppSessionFilter<'_>, +) -> (CompatSessionFilter<'_>, OAuth2SessionFilter<'_>) { + let mut compat_filter = CompatSessionFilter::new(); + let mut oauth2_filter = OAuth2SessionFilter::new(); + + if let Some(user) = filter.user() { + compat_filter = compat_filter.for_user(user); + oauth2_filter = oauth2_filter.for_user(user); + } + + match filter.state() { + Some(AppSessionState::Active) => { + compat_filter = compat_filter.active_only(); + oauth2_filter = oauth2_filter.active_only(); + } + Some(AppSessionState::Finished) => { + compat_filter = compat_filter.finished_only(); + oauth2_filter = oauth2_filter.finished_only(); + } + None => {} + } + + if let Some(device) = filter.device() { + compat_filter = compat_filter.for_device(device); + oauth2_filter = oauth2_filter.for_device(device); + } + + if let Some(browser_session) = filter.browser_session() { + compat_filter = compat_filter.for_browser_session(browser_session); + oauth2_filter = oauth2_filter.for_browser_session(browser_session); + } + + if let Some(last_active_before) = filter.last_active_before() { + compat_filter = compat_filter.with_last_active_before(last_active_before); + oauth2_filter = oauth2_filter.with_last_active_before(last_active_before); + } + + if let Some(last_active_after) = filter.last_active_after() { + compat_filter = compat_filter.with_last_active_after(last_active_after); + oauth2_filter = oauth2_filter.with_last_active_after(last_active_after); + } + + (compat_filter, oauth2_filter) +} + +#[async_trait] +impl AppSessionRepository for PgAppSessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.app_session.list", + fields( + db.query.text, + ), + skip_all, + err, + )] + async fn list( + &mut self, + filter: AppSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (compat_filter, oauth2_filter) = split_filter(filter); + + let mut oauth2_session_select = Query::select() + .expr_as( + 
Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2SessionId)), + AppSessionLookupIden::Cursor, + ) + .expr_as(Expr::cust("NULL"), AppSessionLookupIden::CompatSessionId) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2SessionId)), + AppSessionLookupIden::Oauth2SessionId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2ClientId)), + AppSessionLookupIden::Oauth2ClientId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)), + AppSessionLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserId)), + AppSessionLookupIden::UserId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::ScopeList)), + AppSessionLookupIden::ScopeList, + ) + .expr_as(Expr::cust("NULL"), AppSessionLookupIden::DeviceId) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::HumanName)), + AppSessionLookupIden::HumanName, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::CreatedAt)), + AppSessionLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::FinishedAt)), + AppSessionLookupIden::FinishedAt, + ) + .expr_as(Expr::cust("NULL"), AppSessionLookupIden::IsSynapseAdmin) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserAgent)), + AppSessionLookupIden::UserAgent, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveAt)), + AppSessionLookupIden::LastActiveAt, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveIp)), + AppSessionLookupIden::LastActiveIp, + ) + .from(OAuth2Sessions::Table) + .apply_filter(oauth2_filter) + .clone(); + + let compat_session_select = Query::select() + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)), + AppSessionLookupIden::Cursor, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)), + AppSessionLookupIden::CompatSessionId, + ) + 
.expr_as(Expr::cust("NULL"), AppSessionLookupIden::Oauth2SessionId) + .expr_as(Expr::cust("NULL"), AppSessionLookupIden::Oauth2ClientId) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)), + AppSessionLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserId)), + AppSessionLookupIden::UserId, + ) + .expr_as(Expr::cust("NULL"), AppSessionLookupIden::ScopeList) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::DeviceId)), + AppSessionLookupIden::DeviceId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::HumanName)), + AppSessionLookupIden::HumanName, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::CreatedAt)), + AppSessionLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::FinishedAt)), + AppSessionLookupIden::FinishedAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::IsSynapseAdmin)), + AppSessionLookupIden::IsSynapseAdmin, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserAgent)), + AppSessionLookupIden::UserAgent, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::LastActiveAt)), + AppSessionLookupIden::LastActiveAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::LastActiveIp)), + AppSessionLookupIden::LastActiveIp, + ) + .from(CompatSessions::Table) + .apply_filter(compat_filter) + .clone(); + + let common_table_expression = CommonTableExpression::new() + .query( + oauth2_session_select + .union(UnionType::All, compat_session_select) + .clone(), + ) + .table_name(Alias::new("sessions")) + .clone(); + + let with_clause = Query::with().cte(common_table_expression).clone(); + + let select = Query::select() + .column(ColumnRef::Asterisk) + .from(Alias::new("sessions")) + .generate_pagination(AppSessionLookupIden::Cursor, pagination) + .clone(); + + let (sql, arguments) = 
with_clause.query(select).build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).try_map(TryFrom::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.app_session.count", + fields( + db.query.text, + ), + skip_all, + err, + )] + async fn count(&mut self, filter: AppSessionFilter<'_>) -> Result { + let (compat_filter, oauth2_filter) = split_filter(filter); + let mut oauth2_session_select = Query::select() + .expr(Expr::cust("1")) + .from(OAuth2Sessions::Table) + .apply_filter(oauth2_filter) + .clone(); + + let compat_session_select = Query::select() + .expr(Expr::cust("1")) + .from(CompatSessions::Table) + .apply_filter(compat_filter) + .clone(); + + let common_table_expression = CommonTableExpression::new() + .query( + oauth2_session_select + .union(UnionType::All, compat_session_select) + .clone(), + ) + .table_name(Alias::new("sessions")) + .clone(); + + let with_clause = Query::with().cte(common_table_expression).clone(); + + let select = Query::select() + .expr(Expr::cust("COUNT(*)")) + .from(Alias::new("sessions")) + .clone(); + + let (sql, arguments) = with_clause.query(select).build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.app_session.finish_sessions_to_replace_device", + fields( + db.query.text, + %user.id, + %device_id = device.as_str() + ), + skip_all, + err, + )] + async fn finish_sessions_to_replace_device( + &mut self, + clock: &dyn Clock, + user: &User, + device: &Device, + ) -> Result { + let mut affected = false; + // TODO need to invoke this from all the oauth2 login sites + let span = tracing::info_span!( + "db.app_session.finish_sessions_to_replace_device.compat_sessions", + { 
DB_QUERY_TEXT } = tracing::field::Empty, + ); + let finished_at = clock.now(); + let compat_affected = sqlx::query!( + " + UPDATE compat_sessions SET finished_at = $3 WHERE user_id = $1 AND device_id = $2 AND finished_at IS NULL + ", + Uuid::from(user.id), + device.as_str(), + finished_at + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await? + .rows_affected(); + affected |= compat_affected > 0; + + if let Ok([stable_device_as_scope_token, unstable_device_as_scope_token]) = + device.to_scope_token() + { + let span = tracing::info_span!( + "db.app_session.finish_sessions_to_replace_device.oauth2_sessions", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + let oauth2_affected = sqlx::query!( + " + UPDATE oauth2_sessions + SET finished_at = $4 + WHERE user_id = $1 + AND ($2 = ANY(scope_list) OR $3 = ANY(scope_list)) + AND finished_at IS NULL + ", + Uuid::from(user.id), + stable_device_as_scope_token.as_str(), + unstable_device_as_scope_token.as_str(), + finished_at + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await? 
+ .rows_affected(); + affected |= oauth2_affected > 0; + } + + Ok(affected) + } +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{Device, clock::MockClock}; + use mas_storage::{ + Pagination, RepositoryAccess, + app_session::{AppSession, AppSessionFilter}, + oauth2::OAuth2SessionRepository, + }; + use oauth2_types::{ + requests::GrantType, + scope::{OPENID, Scope}, + }; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_app_repo(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // Create a user + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + let all = AppSessionFilter::new().for_user(&user); + let active = all.active_only(); + let finished = all.finished_only(); + let pagination = Pagination::first(10); + + assert_eq!(repo.app_session().count(all).await.unwrap(), 0); + assert_eq!(repo.app_session().count(active).await.unwrap(), 0); + assert_eq!(repo.app_session().count(finished).await.unwrap(), 0); + + let full_list = repo.app_session().list(all, pagination).await.unwrap(); + assert!(full_list.edges.is_empty()); + let active_list = repo.app_session().list(active, pagination).await.unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo.app_session().list(finished, pagination).await.unwrap(); + assert!(finished_list.edges.is_empty()); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let compat_session = repo + .compat_session() + .add(&mut rng, &clock, &user, device.clone(), None, false, None) + .await + .unwrap(); + + assert_eq!(repo.app_session().count(all).await.unwrap(), 1); + assert_eq!(repo.app_session().count(active).await.unwrap(), 1); + 
assert_eq!(repo.app_session().count(finished).await.unwrap(), 0); + + let full_list = repo.app_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!( + full_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + let active_list = repo.app_session().list(active, pagination).await.unwrap(); + assert_eq!(active_list.edges.len(), 1); + assert_eq!( + active_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + let finished_list = repo.app_session().list(finished, pagination).await.unwrap(); + assert!(finished_list.edges.is_empty()); + + // Finish the session + let compat_session = repo + .compat_session() + .finish(&clock, compat_session) + .await + .unwrap(); + + assert_eq!(repo.app_session().count(all).await.unwrap(), 1); + assert_eq!(repo.app_session().count(active).await.unwrap(), 0); + assert_eq!(repo.app_session().count(finished).await.unwrap(), 1); + + let full_list = repo.app_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!( + full_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + let active_list = repo.app_session().list(active, pagination).await.unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo.app_session().list(finished, pagination).await.unwrap(); + assert_eq!(finished_list.edges.len(), 1); + assert_eq!( + finished_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + + // Start an OAuth2 session + let client = repo + .oauth2_client() + .add( + &mut rng, + &clock, + vec!["https://example.com/redirect".parse().unwrap()], + None, + None, + None, + vec![GrantType::AuthorizationCode], + Some("First client".to_owned()), + Some("https://example.com/logo.png".parse().unwrap()), + Some("https://example.com/".parse().unwrap()), + Some("https://example.com/policy".parse().unwrap()), + 
Some("https://example.com/tos".parse().unwrap()), + Some("https://example.com/jwks.json".parse().unwrap()), + None, + None, + None, + None, + None, + Some("https://example.com/login".parse().unwrap()), + ) + .await + .unwrap(); + + let device2 = Device::generate(&mut rng); + let scope: Scope = [OPENID] + .into_iter() + .chain(device2.to_scope_token().unwrap().into_iter()) + .collect(); + + // We're moving the clock forward by 1 minute between each session to ensure + // we're getting consistent ordering in lists. + clock.advance(Duration::try_minutes(1).unwrap()); + + let oauth_session = repo + .oauth2_session() + .add(&mut rng, &clock, &client, Some(&user), None, scope) + .await + .unwrap(); + + assert_eq!(repo.app_session().count(all).await.unwrap(), 2); + assert_eq!(repo.app_session().count(active).await.unwrap(), 1); + assert_eq!(repo.app_session().count(finished).await.unwrap(), 1); + + let full_list = repo.app_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 2); + assert_eq!( + full_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + assert_eq!( + full_list.edges[1].node, + AppSession::OAuth2(Box::new(oauth_session.clone())) + ); + + let active_list = repo.app_session().list(active, pagination).await.unwrap(); + assert_eq!(active_list.edges.len(), 1); + assert_eq!( + active_list.edges[0].node, + AppSession::OAuth2(Box::new(oauth_session.clone())) + ); + + let finished_list = repo.app_session().list(finished, pagination).await.unwrap(); + assert_eq!(finished_list.edges.len(), 1); + assert_eq!( + finished_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + + // Finish the session + let oauth_session = repo + .oauth2_session() + .finish(&clock, oauth_session) + .await + .unwrap(); + + assert_eq!(repo.app_session().count(all).await.unwrap(), 2); + assert_eq!(repo.app_session().count(active).await.unwrap(), 0); + 
assert_eq!(repo.app_session().count(finished).await.unwrap(), 2); + + let full_list = repo.app_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 2); + assert_eq!( + full_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + assert_eq!( + full_list.edges[1].node, + AppSession::OAuth2(Box::new(oauth_session.clone())) + ); + + let active_list = repo.app_session().list(active, pagination).await.unwrap(); + assert!(active_list.edges.is_empty()); + + let finished_list = repo.app_session().list(finished, pagination).await.unwrap(); + assert_eq!(finished_list.edges.len(), 2); + assert_eq!( + finished_list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + assert_eq!( + full_list.edges[1].node, + AppSession::OAuth2(Box::new(oauth_session.clone())) + ); + + // Query by device + let filter = AppSessionFilter::new().for_device(&device); + assert_eq!(repo.app_session().count(filter).await.unwrap(), 1); + let list = repo.app_session().list(filter, pagination).await.unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!( + list.edges[0].node, + AppSession::Compat(Box::new(compat_session.clone())) + ); + + let filter = AppSessionFilter::new().for_device(&device2); + assert_eq!(repo.app_session().count(filter).await.unwrap(), 1); + let list = repo.app_session().list(filter, pagination).await.unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!( + list.edges[0].node, + AppSession::OAuth2(Box::new(oauth_session.clone())) + ); + + // Create a second user + let user2 = repo + .user() + .add(&mut rng, &clock, "alice".to_owned()) + .await + .unwrap(); + + // If we list/count for this user, we should get nothing + let filter = AppSessionFilter::new().for_user(&user2); + assert_eq!(repo.app_session().count(filter).await.unwrap(), 0); + let list = repo.app_session().list(filter, pagination).await.unwrap(); + assert!(list.edges.is_empty()); + } +} diff --git 
a/matrix-authentication-service/crates/storage-pg/src/compat/access_token.rs b/matrix-authentication-service/crates/storage-pg/src/compat/access_token.rs new file mode 100644 index 00000000..8e68f076 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/compat/access_token.rs @@ -0,0 +1,210 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::{Clock, CompatAccessToken, CompatSession}; +use mas_storage::compat::CompatAccessTokenRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, tracing::ExecuteExt}; + +/// An implementation of [`CompatAccessTokenRepository`] for a PostgreSQL +/// connection +pub struct PgCompatAccessTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgCompatAccessTokenRepository<'c> { + /// Create a new [`PgCompatAccessTokenRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct CompatAccessTokenLookup { + compat_access_token_id: Uuid, + access_token: String, + created_at: DateTime, + expires_at: Option>, + compat_session_id: Uuid, +} + +impl From for CompatAccessToken { + fn from(value: CompatAccessTokenLookup) -> Self { + Self { + id: value.compat_access_token_id.into(), + session_id: value.compat_session_id.into(), + token: value.access_token, + created_at: value.created_at, + expires_at: value.expires_at, + } + } +} + +#[async_trait] +impl CompatAccessTokenRepository for PgCompatAccessTokenRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.compat_access_token.lookup", + skip_all, + fields( + db.query.text, + compat_session.id = %id, + ), + 
err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatAccessTokenLookup, + r#" + SELECT compat_access_token_id + , access_token + , created_at + , expires_at + , compat_session_id + + FROM compat_access_tokens + + WHERE compat_access_token_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.compat_access_token.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatAccessTokenLookup, + r#" + SELECT compat_access_token_id + , access_token + , created_at + , expires_at + , compat_session_id + + FROM compat_access_tokens + + WHERE access_token = $1 + "#, + access_token, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.compat_access_token.add", + skip_all, + fields( + db.query.text, + compat_access_token.id, + %compat_session.id, + user.id = %compat_session.user_id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + compat_session: &CompatSession, + token: String, + expires_after: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("compat_access_token.id", tracing::field::display(id)); + + let expires_at = expires_after.map(|expires_after| created_at + expires_after); + + sqlx::query!( + r#" + INSERT INTO compat_access_tokens + (compat_access_token_id, compat_session_id, access_token, created_at, expires_at) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(compat_session.id), + token, + created_at, + 
expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(CompatAccessToken { + id, + session_id: compat_session.id, + token, + created_at, + expires_at, + }) + } + + #[tracing::instrument( + name = "db.compat_access_token.expire", + skip_all, + fields( + db.query.text, + %compat_access_token.id, + compat_session.id = %compat_access_token.session_id, + ), + err, + )] + async fn expire( + &mut self, + clock: &dyn Clock, + mut compat_access_token: CompatAccessToken, + ) -> Result { + let expires_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE compat_access_tokens + SET expires_at = $2 + WHERE compat_access_token_id = $1 + "#, + Uuid::from(compat_access_token.id), + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + compat_access_token.expires_at = Some(expires_at); + Ok(compat_access_token) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/compat/mod.rs b/matrix-authentication-service/crates/storage-pg/src/compat/mod.rs new file mode 100644 index 00000000..b9ce82da --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/compat/mod.rs @@ -0,0 +1,753 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing PostgreSQL implementation of repositories for the +//! 
compatibility layer + +mod access_token; +mod refresh_token; +mod session; +mod sso_login; + +pub use self::{ + access_token::PgCompatAccessTokenRepository, refresh_token::PgCompatRefreshTokenRepository, + session::PgCompatSessionRepository, sso_login::PgCompatSsoLoginRepository, +}; + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{Clock, Device, clock::MockClock}; + use mas_storage::{ + Pagination, RepositoryAccess, + compat::{ + CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionFilter, + CompatSessionRepository, CompatSsoLoginFilter, + }, + user::UserRepository, + }; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_session_repository(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // Create a user + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + let all = CompatSessionFilter::new().for_user(&user); + let active = all.active_only(); + let finished = all.finished_only(); + let pagination = Pagination::first(10); + + assert_eq!(repo.compat_session().count(all).await.unwrap(), 0); + assert_eq!(repo.compat_session().count(active).await.unwrap(), 0); + assert_eq!(repo.compat_session().count(finished).await.unwrap(), 0); + + let full_list = repo.compat_session().list(all, pagination).await.unwrap(); + assert!(full_list.edges.is_empty()); + let active_list = repo + .compat_session() + .list(active, pagination) + .await + .unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo + .compat_session() + .list(finished, pagination) + .await + .unwrap(); + assert!(finished_list.edges.is_empty()); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let device_str = 
device.as_str().to_owned(); + let session = repo + .compat_session() + .add(&mut rng, &clock, &user, device.clone(), None, false, None) + .await + .unwrap(); + assert_eq!(session.user_id, user.id); + assert_eq!(session.device.as_ref().unwrap().as_str(), device_str); + assert!(session.is_valid()); + assert!(!session.is_finished()); + + assert_eq!(repo.compat_session().count(all).await.unwrap(), 1); + assert_eq!(repo.compat_session().count(active).await.unwrap(), 1); + assert_eq!(repo.compat_session().count(finished).await.unwrap(), 0); + + let full_list = repo.compat_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!(full_list.edges[0].node.0.id, session.id); + let active_list = repo + .compat_session() + .list(active, pagination) + .await + .unwrap(); + assert_eq!(active_list.edges.len(), 1); + assert_eq!(active_list.edges[0].node.0.id, session.id); + let finished_list = repo + .compat_session() + .list(finished, pagination) + .await + .unwrap(); + assert!(finished_list.edges.is_empty()); + + // Lookup the session and check it didn't change + let session_lookup = repo + .compat_session() + .lookup(session.id) + .await + .unwrap() + .expect("compat session not found"); + assert_eq!(session_lookup.id, session.id); + assert_eq!(session_lookup.user_id, user.id); + assert_eq!(session.device.as_ref().unwrap().as_str(), device_str); + assert!(session_lookup.is_valid()); + assert!(!session_lookup.is_finished()); + + // Record a user-agent for the session + assert!(session_lookup.user_agent.is_none()); + let session = repo + .compat_session() + .record_user_agent(session_lookup, "Mozilla/5.0".to_owned()) + .await + .unwrap(); + assert_eq!(session.user_agent.as_deref(), Some("Mozilla/5.0")); + + // Reload the session and check again + let session_lookup = repo + .compat_session() + .lookup(session.id) + .await + .unwrap() + .expect("compat session not found"); + assert_eq!(session_lookup.user_agent.as_deref(), 
Some("Mozilla/5.0")); + + // Look up the session by device + let list = repo + .compat_session() + .list( + CompatSessionFilter::new() + .for_user(&user) + .for_device(&device), + pagination, + ) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + let session_lookup = &list.edges[0].node.0; + assert_eq!(session_lookup.id, session.id); + assert_eq!(session_lookup.user_id, user.id); + assert_eq!(session.device.as_ref().unwrap().as_str(), device_str); + assert!(session_lookup.is_valid()); + assert!(!session_lookup.is_finished()); + + // Finish the session + let session = repo.compat_session().finish(&clock, session).await.unwrap(); + assert!(!session.is_valid()); + assert!(session.is_finished()); + + assert_eq!(repo.compat_session().count(all).await.unwrap(), 1); + assert_eq!(repo.compat_session().count(active).await.unwrap(), 0); + assert_eq!(repo.compat_session().count(finished).await.unwrap(), 1); + + let full_list = repo.compat_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!(full_list.edges[0].node.0.id, session.id); + let active_list = repo + .compat_session() + .list(active, pagination) + .await + .unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo + .compat_session() + .list(finished, pagination) + .await + .unwrap(); + assert_eq!(finished_list.edges.len(), 1); + assert_eq!(finished_list.edges[0].node.0.id, session.id); + + // Reload the session and check again + let session_lookup = repo + .compat_session() + .lookup(session.id) + .await + .unwrap() + .expect("compat session not found"); + assert!(!session_lookup.is_valid()); + assert!(session_lookup.is_finished()); + + // Now add another session, with an SSO login this time + let unknown_session = session; + // Start a new SSO login + let login = repo + .compat_sso_login() + .add( + &mut rng, + &clock, + "login-token".to_owned(), + "https://example.com/callback".parse().unwrap(), + ) + .await + .unwrap(); + 
assert!(login.is_pending()); + + // Start a browser session for the user + let browser_session = repo + .browser_session() + .add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let sso_login_session = repo + .compat_session() + .add( + &mut rng, + &clock, + &user, + device, + Some(&browser_session), + false, + None, + ) + .await + .unwrap(); + + // Associate the login with the session + let login = repo + .compat_sso_login() + .fulfill(&clock, login, &browser_session) + .await + .unwrap(); + assert!(login.is_fulfilled()); + let login = repo + .compat_sso_login() + .exchange(&clock, login, &sso_login_session) + .await + .unwrap(); + assert!(login.is_exchanged()); + + // Now query the session list with both the unknown and SSO login session type + // filter + let all = CompatSessionFilter::new().for_user(&user); + let sso_login = all.sso_login_only(); + let unknown = all.unknown_only(); + assert_eq!(repo.compat_session().count(all).await.unwrap(), 2); + assert_eq!(repo.compat_session().count(sso_login).await.unwrap(), 1); + assert_eq!(repo.compat_session().count(unknown).await.unwrap(), 1); + + let list = repo + .compat_session() + .list(sso_login, pagination) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node.0.id, sso_login_session.id); + let list = repo + .compat_session() + .list(unknown, pagination) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node.0.id, unknown_session.id); + + // Check that combining the two filters works + // At this point, there is one active SSO login session and one finished unknown + // session + assert_eq!( + repo.compat_session() + .count(all.sso_login_only().active_only()) + .await + .unwrap(), + 1 + ); + assert_eq!( + repo.compat_session() + .count(all.sso_login_only().finished_only()) + .await + .unwrap(), + 0 + ); + assert_eq!( + repo.compat_session() + 
.count(all.unknown_only().active_only()) + .await + .unwrap(), + 0 + ); + assert_eq!( + repo.compat_session() + .count(all.unknown_only().finished_only()) + .await + .unwrap(), + 1 + ); + + // Check that we can batch finish sessions + let affected = repo + .compat_session() + .finish_bulk(&clock, all.sso_login_only().active_only()) + .await + .unwrap(); + assert_eq!(affected, 1); + assert_eq!(repo.compat_session().count(finished).await.unwrap(), 2); + assert_eq!(repo.compat_session().count(active).await.unwrap(), 0); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_access_token_repository(pool: PgPool) { + const FIRST_TOKEN: &str = "first_access_token"; + const SECOND_TOKEN: &str = "second_access_token"; + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a user + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add(&mut rng, &clock, &user, device, None, false, None) + .await + .unwrap(); + + // Add an access token to that session + let token = repo + .compat_access_token() + .add( + &mut rng, + &clock, + &session, + FIRST_TOKEN.to_owned(), + Some(Duration::try_minutes(1).unwrap()), + ) + .await + .unwrap(); + assert_eq!(token.session_id, session.id); + assert_eq!(token.token, FIRST_TOKEN); + + // Commit the txn and grab a new transaction, to test a conflict + repo.save().await.unwrap(); + + { + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + // Adding the same token a second time should conflict + assert!( + repo.compat_access_token() + .add( + &mut rng, + &clock, + &session, + FIRST_TOKEN.to_owned(), + Some(Duration::try_minutes(1).unwrap()), + ) + .await + .is_err() + ); + repo.cancel().await.unwrap(); + } + + // Grab a new repo + let 
mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Looking up via ID works + let token_lookup = repo + .compat_access_token() + .lookup(token.id) + .await + .unwrap() + .expect("compat access token not found"); + assert_eq!(token.id, token_lookup.id); + assert_eq!(token_lookup.session_id, session.id); + + // Looking up via the token value works + let token_lookup = repo + .compat_access_token() + .find_by_token(FIRST_TOKEN) + .await + .unwrap() + .expect("compat access token not found"); + assert_eq!(token.id, token_lookup.id); + assert_eq!(token_lookup.session_id, session.id); + + // Token is currently valid + assert!(token.is_valid(clock.now())); + + clock.advance(Duration::try_minutes(1).unwrap()); + // Token should have expired + assert!(!token.is_valid(clock.now())); + + // Add a second access token, this time without expiration + let token = repo + .compat_access_token() + .add(&mut rng, &clock, &session, SECOND_TOKEN.to_owned(), None) + .await + .unwrap(); + assert_eq!(token.session_id, session.id); + assert_eq!(token.token, SECOND_TOKEN); + + // Token is currently valid + assert!(token.is_valid(clock.now())); + + // Make it expire + repo.compat_access_token() + .expire(&clock, token) + .await + .unwrap(); + + // Reload it + let token = repo + .compat_access_token() + .find_by_token(SECOND_TOKEN) + .await + .unwrap() + .expect("compat access token not found"); + + // Token is not valid anymore + assert!(!token.is_valid(clock.now())); + + repo.save().await.unwrap(); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_refresh_token_repository(pool: PgPool) { + const ACCESS_TOKEN: &str = "access_token"; + const REFRESH_TOKEN: &str = "refresh_token"; + const REFRESH_TOKEN2: &str = "refresh_token2"; + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a user + let user = repo + .user() + .add(&mut rng, &clock, 
"john".to_owned()) + .await + .unwrap(); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let session = repo + .compat_session() + .add(&mut rng, &clock, &user, device, None, false, None) + .await + .unwrap(); + + // Add an access token to that session + let access_token = repo + .compat_access_token() + .add(&mut rng, &clock, &session, ACCESS_TOKEN.to_owned(), None) + .await + .unwrap(); + + let refresh_token = repo + .compat_refresh_token() + .add( + &mut rng, + &clock, + &session, + &access_token, + REFRESH_TOKEN.to_owned(), + ) + .await + .unwrap(); + assert_eq!(refresh_token.session_id, session.id); + assert_eq!(refresh_token.access_token_id, access_token.id); + assert_eq!(refresh_token.token, REFRESH_TOKEN); + assert!(refresh_token.is_valid()); + assert!(!refresh_token.is_consumed()); + + // Look it up by ID and check everything matches + let refresh_token_lookup = repo + .compat_refresh_token() + .lookup(refresh_token.id) + .await + .unwrap() + .expect("refresh token not found"); + assert_eq!(refresh_token_lookup.id, refresh_token.id); + assert_eq!(refresh_token_lookup.session_id, session.id); + assert_eq!(refresh_token_lookup.access_token_id, access_token.id); + assert_eq!(refresh_token_lookup.token, REFRESH_TOKEN); + assert!(refresh_token_lookup.is_valid()); + assert!(!refresh_token_lookup.is_consumed()); + + // Look it up by token and check everything matches + let refresh_token_lookup = repo + .compat_refresh_token() + .find_by_token(REFRESH_TOKEN) + .await + .unwrap() + .expect("refresh token not found"); + assert_eq!(refresh_token_lookup.id, refresh_token.id); + assert_eq!(refresh_token_lookup.session_id, session.id); + assert_eq!(refresh_token_lookup.access_token_id, access_token.id); + assert_eq!(refresh_token_lookup.token, REFRESH_TOKEN); + assert!(refresh_token_lookup.is_valid()); + assert!(!refresh_token_lookup.is_consumed()); + + // Consume the first token, but to do so we need a 2nd to replace it with + 
let refresh_token2 = repo + .compat_refresh_token() + .add( + &mut rng, + &clock, + &session, + &access_token, + REFRESH_TOKEN2.to_owned(), + ) + .await + .unwrap(); + + let refresh_token = repo + .compat_refresh_token() + .consume_and_replace(&clock, refresh_token, &refresh_token2) + .await + .unwrap(); + assert!(!refresh_token.is_valid()); + assert!(refresh_token.is_consumed()); + + // Reload the first token and check again + let refresh_token_lookup = repo + .compat_refresh_token() + .find_by_token(REFRESH_TOKEN) + .await + .unwrap() + .expect("refresh token not found"); + assert!(!refresh_token_lookup.is_valid()); + assert!(refresh_token_lookup.is_consumed()); + + // Consuming it again should not work + assert!( + repo.compat_refresh_token() + .consume_and_replace(&clock, refresh_token, &refresh_token2) + .await + .is_err() + ); + + repo.save().await.unwrap(); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_compat_sso_login_repository(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a user + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + // Lookup an unknown SSO login + let login = repo.compat_sso_login().lookup(Ulid::nil()).await.unwrap(); + assert_eq!(login, None); + + let all = CompatSsoLoginFilter::new(); + let for_user = all.for_user(&user); + let pending = all.pending_only(); + let fulfilled = all.fulfilled_only(); + let exchanged = all.exchanged_only(); + + // Check the initial counts + assert_eq!(repo.compat_sso_login().count(all).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(for_user).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(pending).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(fulfilled).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(exchanged).await.unwrap(), 0); + + // Lookup 
an unknown login token + let login = repo + .compat_sso_login() + .find_by_token("login-token") + .await + .unwrap(); + assert_eq!(login, None); + + // Start a new SSO login + let login = repo + .compat_sso_login() + .add( + &mut rng, + &clock, + "login-token".to_owned(), + "https://example.com/callback".parse().unwrap(), + ) + .await + .unwrap(); + assert!(login.is_pending()); + + // Check the counts + assert_eq!(repo.compat_sso_login().count(all).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(for_user).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(pending).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(fulfilled).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(exchanged).await.unwrap(), 0); + + // Lookup the login by ID + let login_lookup = repo + .compat_sso_login() + .lookup(login.id) + .await + .unwrap() + .expect("login not found"); + assert_eq!(login_lookup, login); + + // Find the login by token + let login_lookup = repo + .compat_sso_login() + .find_by_token("login-token") + .await + .unwrap() + .expect("login not found"); + assert_eq!(login_lookup, login); + + // Start a compat session for that user + let device = Device::generate(&mut rng); + let compat_session = repo + .compat_session() + .add(&mut rng, &clock, &user, device, None, false, None) + .await + .unwrap(); + + // Exchanging before fulfilling should not work + // Note: It should also not poison the SQL transaction + let res = repo + .compat_sso_login() + .exchange(&clock, login.clone(), &compat_session) + .await; + assert!(res.is_err()); + + // Start a browser session for that user + let browser_session = repo + .browser_session() + .add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + // Associate the login with the session + let login = repo + .compat_sso_login() + .fulfill(&clock, login, &browser_session) + .await + .unwrap(); + assert!(login.is_fulfilled()); + + // Check the counts + 
assert_eq!(repo.compat_sso_login().count(all).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(for_user).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(pending).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(fulfilled).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(exchanged).await.unwrap(), 0); + + // Fulfilling again should not work + // Note: It should also not poison the SQL transaction + let res = repo + .compat_sso_login() + .fulfill(&clock, login.clone(), &browser_session) + .await; + assert!(res.is_err()); + + // Exchange that login + let login = repo + .compat_sso_login() + .exchange(&clock, login, &compat_session) + .await + .unwrap(); + assert!(login.is_exchanged()); + + // Check the counts + assert_eq!(repo.compat_sso_login().count(all).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(for_user).await.unwrap(), 1); + assert_eq!(repo.compat_sso_login().count(pending).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(fulfilled).await.unwrap(), 0); + assert_eq!(repo.compat_sso_login().count(exchanged).await.unwrap(), 1); + + // Exchange again should not work + // Note: It should also not poison the SQL transaction + let res = repo + .compat_sso_login() + .exchange(&clock, login.clone(), &compat_session) + .await; + assert!(res.is_err()); + + // Fulfilling after exchanging should not work + // Note: It should also not poison the SQL transaction + let res = repo + .compat_sso_login() + .fulfill(&clock, login.clone(), &browser_session) + .await; + assert!(res.is_err()); + + let pagination = Pagination::first(10); + + // List all logins + let logins = repo.compat_sso_login().list(all, pagination).await.unwrap(); + assert!(!logins.has_next_page); + assert_eq!(logins.edges.len(), 1); + assert_eq!(logins.edges[0].node, login); + + // List the logins for the user + let logins = repo + .compat_sso_login() + .list(for_user, pagination) + .await + .unwrap(); + 
assert!(!logins.has_next_page); + assert_eq!(logins.edges.len(), 1); + assert_eq!(logins.edges[0].node, login); + + // List only the pending logins for the user + let logins = repo + .compat_sso_login() + .list(for_user.pending_only(), pagination) + .await + .unwrap(); + assert!(!logins.has_next_page); + assert!(logins.edges.is_empty()); + + // List only the fulfilled logins for the user + let logins = repo + .compat_sso_login() + .list(for_user.fulfilled_only(), pagination) + .await + .unwrap(); + assert!(!logins.has_next_page); + assert!(logins.edges.is_empty()); + + // List only the exchanged logins for the user + let logins = repo + .compat_sso_login() + .list(for_user.exchanged_only(), pagination) + .await + .unwrap(); + assert!(!logins.has_next_page); + assert_eq!(logins.edges.len(), 1); + assert_eq!(logins.edges[0].node, login); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/compat/refresh_token.rs b/matrix-authentication-service/crates/storage-pg/src/compat/refresh_token.rs new file mode 100644 index 00000000..3cb9d794 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/compat/refresh_token.rs @@ -0,0 +1,241 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Clock, CompatAccessToken, CompatRefreshToken, CompatRefreshTokenState, CompatSession, +}; +use mas_storage::compat::CompatRefreshTokenRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, tracing::ExecuteExt}; + +/// An implementation of [`CompatRefreshTokenRepository`] for a PostgreSQL +/// connection +pub struct PgCompatRefreshTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgCompatRefreshTokenRepository<'c> { + /// Create a new [`PgCompatRefreshTokenRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct CompatRefreshTokenLookup { + compat_refresh_token_id: Uuid, + refresh_token: String, + created_at: DateTime, + consumed_at: Option>, + compat_access_token_id: Uuid, + compat_session_id: Uuid, +} + +impl From for CompatRefreshToken { + fn from(value: CompatRefreshTokenLookup) -> Self { + let state = match value.consumed_at { + Some(consumed_at) => CompatRefreshTokenState::Consumed { consumed_at }, + None => CompatRefreshTokenState::Valid, + }; + + Self { + id: value.compat_refresh_token_id.into(), + state, + session_id: value.compat_session_id.into(), + token: value.refresh_token, + created_at: value.created_at, + access_token_id: value.compat_access_token_id.into(), + } + } +} + +#[async_trait] +impl CompatRefreshTokenRepository for PgCompatRefreshTokenRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.compat_refresh_token.lookup", + skip_all, + fields( + db.query.text, + compat_refresh_token.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatRefreshTokenLookup, + r#" + SELECT compat_refresh_token_id + , refresh_token + , created_at + , consumed_at + , compat_session_id + , compat_access_token_id 
+ + FROM compat_refresh_tokens + + WHERE compat_refresh_token_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.compat_refresh_token.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatRefreshTokenLookup, + r#" + SELECT compat_refresh_token_id + , refresh_token + , created_at + , consumed_at + , compat_session_id + , compat_access_token_id + + FROM compat_refresh_tokens + + WHERE refresh_token = $1 + "#, + refresh_token, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.compat_refresh_token.add", + skip_all, + fields( + db.query.text, + compat_refresh_token.id, + %compat_session.id, + user.id = %compat_session.user_id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + compat_session: &CompatSession, + compat_access_token: &CompatAccessToken, + token: String, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("compat_refresh_token.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO compat_refresh_tokens + (compat_refresh_token_id, compat_session_id, + compat_access_token_id, refresh_token, created_at) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(compat_session.id), + Uuid::from(compat_access_token.id), + token, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(CompatRefreshToken { + id, + state: CompatRefreshTokenState::default(), + session_id: compat_session.id, + access_token_id: compat_access_token.id, + token, 
+ created_at, + }) + } + + #[tracing::instrument( + name = "db.compat_refresh_token.consume_and_replace", + skip_all, + fields( + db.query.text, + %compat_refresh_token.id, + %successor_compat_refresh_token.id, + compat_session.id = %compat_refresh_token.session_id, + ), + err, + )] + async fn consume_and_replace( + &mut self, + clock: &dyn Clock, + compat_refresh_token: CompatRefreshToken, + successor_compat_refresh_token: &CompatRefreshToken, + ) -> Result { + if compat_refresh_token.session_id != successor_compat_refresh_token.session_id { + return Err(DatabaseError::invalid_operation()); + } + + let consumed_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE compat_refresh_tokens + SET consumed_at = $2 + WHERE compat_session_id = $1 + AND consumed_at IS NULL + AND compat_refresh_token_id <> $3 + "#, + Uuid::from(compat_refresh_token.session_id), + consumed_at, + Uuid::from(successor_compat_refresh_token.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // This can affect multiple rows in case we've imported refresh tokens + // from Synapse. What we care about is that it at least affected one, + // which is what we're checking here + if res.rows_affected() == 0 { + return Err(DatabaseError::RowsAffected { + expected: 1, + actual: 0, + }); + } + + let compat_refresh_token = compat_refresh_token + .consume(consumed_at) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(compat_refresh_token) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/compat/session.rs b/matrix-authentication-service/crates/storage-pg/src/compat/session.rs new file mode 100644 index 00000000..e7612fdc --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/compat/session.rs @@ -0,0 +1,814 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + BrowserSession, Clock, CompatSession, CompatSessionState, CompatSsoLogin, CompatSsoLoginState, + Device, User, +}; +use mas_storage::{ + Page, Pagination, + compat::{CompatSessionFilter, CompatSessionRepository}, + pagination::Node, +}; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt, StatementWithJoinsExt}, + iden::{CompatSessions, CompatSsoLogins, UserSessions}, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`CompatSessionRepository`] for a PostgreSQL connection +pub struct PgCompatSessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgCompatSessionRepository<'c> { + /// Create a new [`PgCompatSessionRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct CompatSessionLookup { + compat_session_id: Uuid, + device_id: Option, + human_name: Option, + user_id: Uuid, + user_session_id: Option, + created_at: DateTime, + finished_at: Option>, + is_synapse_admin: bool, + user_agent: Option, + last_active_at: Option>, + last_active_ip: Option, +} + +impl Node for CompatSessionLookup { + fn cursor(&self) -> Ulid { + self.compat_session_id.into() + } +} + +impl From for CompatSession { + fn from(value: CompatSessionLookup) -> Self { + let id = value.compat_session_id.into(); + + let state = match value.finished_at { + None => CompatSessionState::Valid, + Some(finished_at) => CompatSessionState::Finished { finished_at }, + }; + + CompatSession { + 
id, + state, + user_id: value.user_id.into(), + user_session_id: value.user_session_id.map(Ulid::from), + device: value.device_id.map(Device::from), + human_name: value.human_name, + created_at: value.created_at, + is_synapse_admin: value.is_synapse_admin, + user_agent: value.user_agent, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + } + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct CompatSessionAndSsoLoginLookup { + compat_session_id: Uuid, + device_id: Option, + human_name: Option, + user_id: Uuid, + user_session_id: Option, + created_at: DateTime, + finished_at: Option>, + is_synapse_admin: bool, + user_agent: Option, + last_active_at: Option>, + last_active_ip: Option, + compat_sso_login_id: Option, + compat_sso_login_token: Option, + compat_sso_login_redirect_uri: Option, + compat_sso_login_created_at: Option>, + compat_sso_login_fulfilled_at: Option>, + compat_sso_login_exchanged_at: Option>, +} + +impl Node for CompatSessionAndSsoLoginLookup { + fn cursor(&self) -> Ulid { + self.compat_session_id.into() + } +} + +impl TryFrom for (CompatSession, Option) { + type Error = DatabaseInconsistencyError; + + fn try_from(value: CompatSessionAndSsoLoginLookup) -> Result { + let id = value.compat_session_id.into(); + + let state = match value.finished_at { + None => CompatSessionState::Valid, + Some(finished_at) => CompatSessionState::Finished { finished_at }, + }; + + let session = CompatSession { + id, + state, + user_id: value.user_id.into(), + device: value.device_id.map(Device::from), + human_name: value.human_name, + user_session_id: value.user_session_id.map(Ulid::from), + created_at: value.created_at, + is_synapse_admin: value.is_synapse_admin, + user_agent: value.user_agent, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + }; + + match ( + value.compat_sso_login_id, + value.compat_sso_login_token, + value.compat_sso_login_redirect_uri, + value.compat_sso_login_created_at, + 
value.compat_sso_login_fulfilled_at, + value.compat_sso_login_exchanged_at, + ) { + (None, None, None, None, None, None) => Ok((session, None)), + ( + Some(id), + Some(login_token), + Some(redirect_uri), + Some(created_at), + fulfilled_at, + exchanged_at, + ) => { + let id = id.into(); + let redirect_uri = Url::parse(&redirect_uri).map_err(|e| { + DatabaseInconsistencyError::on("compat_sso_logins") + .column("redirect_uri") + .row(id) + .source(e) + })?; + + let state = match (fulfilled_at, exchanged_at) { + (Some(fulfilled_at), Some(exchanged_at)) => CompatSsoLoginState::Exchanged { + fulfilled_at, + exchanged_at, + compat_session_id: session.id, + }, + _ => return Err(DatabaseInconsistencyError::on("compat_sso_logins").row(id)), + }; + + let login = CompatSsoLogin { + id, + redirect_uri, + login_token, + created_at, + state, + }; + + Ok((session, Some(login))) + } + _ => Err(DatabaseInconsistencyError::on("compat_sso_logins").row(id)), + } + } +} + +impl Filter for CompatSessionFilter<'_> { + fn generate_condition(&self, has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.user().map(|user| { + Expr::col((CompatSessions::Table, CompatSessions::UserId)).eq(Uuid::from(user.id)) + })) + .add_option(self.browser_session().map(|browser_session| { + Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)) + .eq(Uuid::from(browser_session.id)) + })) + .add_option(self.browser_session_filter().map(|browser_session_filter| { + Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UserSessions::Table, + UserSessions::UserSessionId, + ))) + .apply_filter(browser_session_filter) + .from(UserSessions::Table) + .take(), + ) + })) + .add_option(self.state().map(|state| { + if state.is_active() { + Expr::col((CompatSessions::Table, CompatSessions::FinishedAt)).is_null() + } else { + Expr::col((CompatSessions::Table, CompatSessions::FinishedAt)).is_not_null() 
+ } + })) + .add_option(self.auth_type().map(|auth_type| { + // In in the SELECT to list sessions, we can rely on the JOINed table, whereas + // in other queries we need to do a subquery + if has_joins { + if auth_type.is_sso_login() { + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId)) + .is_not_null() + } else { + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId)) + .is_null() + } + } else { + // This builds either a: + // `WHERE compat_session_id = ANY(...)` + // or a `WHERE compat_session_id <> ALL(...)` + let compat_sso_logins = Query::select() + .expr(Expr::col(( + CompatSsoLogins::Table, + CompatSsoLogins::CompatSessionId, + ))) + .from(CompatSsoLogins::Table) + .take(); + + if auth_type.is_sso_login() { + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)) + .eq(Expr::any(compat_sso_logins)) + } else { + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)) + .ne(Expr::all(compat_sso_logins)) + } + } + })) + .add_option(self.last_active_after().map(|last_active_after| { + Expr::col((CompatSessions::Table, CompatSessions::LastActiveAt)) + .gt(last_active_after) + })) + .add_option(self.last_active_before().map(|last_active_before| { + Expr::col((CompatSessions::Table, CompatSessions::LastActiveAt)) + .lt(last_active_before) + })) + .add_option(self.device().map(|device| { + Expr::col((CompatSessions::Table, CompatSessions::DeviceId)).eq(device.as_str()) + })) + } +} + +#[async_trait] +impl CompatSessionRepository for PgCompatSessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.compat_session.lookup", + skip_all, + fields( + db.query.text, + compat_session.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatSessionLookup, + r#" + SELECT compat_session_id + , device_id + , human_name + , user_id + , user_session_id + , created_at + , finished_at + , is_synapse_admin + , 
user_agent + , last_active_at + , last_active_ip as "last_active_ip: IpAddr" + FROM compat_sessions + WHERE compat_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.compat_session.add", + skip_all, + fields( + db.query.text, + compat_session.id, + %user.id, + %user.username, + compat_session.device.id = device.as_str(), + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + device: Device, + browser_session: Option<&BrowserSession>, + is_synapse_admin: bool, + human_name: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("compat_session.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO compat_sessions + (compat_session_id, user_id, device_id, + user_session_id, created_at, is_synapse_admin, + human_name) + VALUES ($1, $2, $3, $4, $5, $6, $7) + "#, + Uuid::from(id), + Uuid::from(user.id), + device.as_str(), + browser_session.map(|s| Uuid::from(s.id)), + created_at, + is_synapse_admin, + human_name.as_deref(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(CompatSession { + id, + state: CompatSessionState::default(), + user_id: user.id, + device: Some(device), + human_name, + user_session_id: browser_session.map(|s| s.id), + created_at, + is_synapse_admin, + user_agent: None, + last_active_at: None, + last_active_ip: None, + }) + } + + #[tracing::instrument( + name = "db.compat_session.finish", + skip_all, + fields( + db.query.text, + %compat_session.id, + user.id = %compat_session.user_id, + compat_session.device.id = compat_session.device.as_ref().map(mas_data_model::Device::as_str), + ), + err, + )] + async fn finish( + &mut self, + clock: &dyn Clock, + compat_session: CompatSession, + ) -> 
Result { + let finished_at = clock.now(); + + let res = sqlx::query!( + r#" + UPDATE compat_sessions cs + SET finished_at = $2 + WHERE compat_session_id = $1 + "#, + Uuid::from(compat_session.id), + finished_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + let compat_session = compat_session + .finish(finished_at) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(compat_session) + } + + #[tracing::instrument( + name = "db.compat_session.finish_bulk", + skip_all, + fields(db.query.text), + err, + )] + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: CompatSessionFilter<'_>, + ) -> Result { + let finished_at = clock.now(); + let (sql, arguments) = Query::update() + .table(CompatSessions::Table) + .value(CompatSessions::FinishedAt, finished_at) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let res = sqlx::query_with(&sql, arguments) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.compat_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: CompatSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)), + CompatSessionAndSsoLoginLookupIden::CompatSessionId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::DeviceId)), + CompatSessionAndSsoLoginLookupIden::DeviceId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::HumanName)), + CompatSessionAndSsoLoginLookupIden::HumanName, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserId)), + CompatSessionAndSsoLoginLookupIden::UserId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)), + 
CompatSessionAndSsoLoginLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::CreatedAt)), + CompatSessionAndSsoLoginLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::FinishedAt)), + CompatSessionAndSsoLoginLookupIden::FinishedAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::IsSynapseAdmin)), + CompatSessionAndSsoLoginLookupIden::IsSynapseAdmin, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::UserAgent)), + CompatSessionAndSsoLoginLookupIden::UserAgent, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::LastActiveAt)), + CompatSessionAndSsoLoginLookupIden::LastActiveAt, + ) + .expr_as( + Expr::col((CompatSessions::Table, CompatSessions::LastActiveIp)), + CompatSessionAndSsoLoginLookupIden::LastActiveIp, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginId, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::LoginToken)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginToken, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::RedirectUri)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginRedirectUri, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CreatedAt)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginCreatedAt, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::FulfilledAt)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginFulfilledAt, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::ExchangedAt)), + CompatSessionAndSsoLoginLookupIden::CompatSsoLoginExchangedAt, + ) + .from(CompatSessions::Table) + .left_join( + CompatSsoLogins::Table, + Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)) + .equals((CompatSsoLogins::Table, CompatSsoLogins::CompatSessionId)), + ) + .apply_filter_with_joins(filter) + 
.generate_pagination( + (CompatSessions::Table, CompatSessions::CompatSessionId), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).try_map(TryFrom::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.compat_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: CompatSessionFilter<'_>) -> Result { + let (sql, arguments) = sea_query::Query::select() + .expr(Expr::col((CompatSessions::Table, CompatSessions::CompatSessionId)).count()) + .from(CompatSessions::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.compat_session.record_batch_activity", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn record_batch_activity( + &mut self, + mut activities: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error> { + // Sort the activity by ID, so that when batching the updates, Postgres + // locks the rows in a stable order, preventing deadlocks + activities.sort_unstable(); + let mut ids = Vec::with_capacity(activities.len()); + let mut last_activities = Vec::with_capacity(activities.len()); + let mut ips = Vec::with_capacity(activities.len()); + + for (id, last_activity, ip) in activities { + ids.push(Uuid::from(id)); + last_activities.push(last_activity); + ips.push(ip); + } + + let res = sqlx::query!( + r#" + UPDATE compat_sessions + SET last_active_at = GREATEST(t.last_active_at, compat_sessions.last_active_at) + , last_active_ip = COALESCE(t.last_active_ip, compat_sessions.last_active_ip) + FROM ( + SELECT * + FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[]) + AS 
t(compat_session_id, last_active_at, last_active_ip) + ) AS t + WHERE compat_sessions.compat_session_id = t.compat_session_id + "#, + &ids, + &last_activities, + &ips as &[Option], + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, ids.len().try_into().unwrap_or(u64::MAX))?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.compat_session.record_user_agent", + skip_all, + fields( + db.query.text, + %compat_session.id, + ), + err, + )] + async fn record_user_agent( + &mut self, + mut compat_session: CompatSession, + user_agent: String, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE compat_sessions + SET user_agent = $2 + WHERE compat_session_id = $1 + "#, + Uuid::from(compat_session.id), + &*user_agent, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + compat_session.user_agent = Some(user_agent); + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(compat_session) + } + + #[tracing::instrument( + name = "repository.compat_session.set_human_name", + skip(self), + fields( + compat_session.id = %compat_session.id, + compat_session.human_name = ?human_name, + ), + err, + )] + async fn set_human_name( + &mut self, + mut compat_session: CompatSession, + human_name: Option, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE compat_sessions + SET human_name = $2 + WHERE compat_session_id = $1 + "#, + Uuid::from(compat_session.id), + human_name.as_deref(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + compat_session.human_name = human_name; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(compat_session) + } + + #[tracing::instrument( + name = "db.compat_session.cleanup_finished", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT compat_session_id, finished_at 
+ FROM compat_sessions + WHERE finished_at IS NOT NULL + AND ($1::timestamptz IS NULL OR finished_at >= $1) + AND finished_at < $2 + ORDER BY finished_at ASC + LIMIT $3 + FOR UPDATE + ), + + -- Delete refresh tokens first because they reference access tokens + deleted_refresh_tokens AS ( + DELETE FROM compat_refresh_tokens + USING to_delete + WHERE compat_refresh_tokens.compat_session_id = to_delete.compat_session_id + ), + + deleted_access_tokens AS ( + DELETE FROM compat_access_tokens + USING to_delete + WHERE compat_access_tokens.compat_session_id = to_delete.compat_session_id + ), + + deleted_sso_logins AS ( + DELETE FROM compat_sso_logins + USING to_delete + WHERE compat_sso_logins.compat_session_id = to_delete.compat_session_id + ), + + deleted_sessions AS ( + DELETE FROM compat_sessions + USING to_delete + WHERE compat_sessions.compat_session_id = to_delete.compat_session_id + RETURNING compat_sessions.finished_at + ) + + SELECT + COUNT(*) as "count!", + MAX(finished_at) as last_finished_at + FROM deleted_sessions + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_finished_at, + )) + } + + #[tracing::instrument( + name = "db.compat_session.cleanup_inactive_ips", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + threshold = %threshold, + limit = limit, + ), + err, + )] + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH to_update AS ( + SELECT compat_session_id, last_active_at + FROM compat_sessions + WHERE last_active_ip IS NOT NULL + AND last_active_at IS NOT NULL + AND ($1::timestamptz IS NULL OR last_active_at >= $1) + AND last_active_at < $2 + ORDER BY last_active_at ASC + LIMIT $3 + FOR UPDATE + ), + updated AS ( + UPDATE compat_sessions + SET 
last_active_ip = NULL + FROM to_update + WHERE compat_sessions.compat_session_id = to_update.compat_session_id + RETURNING compat_sessions.last_active_at + ) + SELECT COUNT(*) AS "count!", MAX(last_active_at) AS last_active_at FROM updated + "#, + since, + threshold, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_active_at, + )) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/compat/sso_login.rs b/matrix-authentication-service/crates/storage-pg/src/compat/sso_login.rs new file mode 100644 index 00000000..43ad4bea --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/compat/sso_login.rs @@ -0,0 +1,483 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Clock, CompatSession, CompatSsoLogin, CompatSsoLoginState}; +use mas_storage::{ + Page, Pagination, + compat::{CompatSsoLoginFilter, CompatSsoLoginRepository}, + pagination::Node, +}; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt}, + iden::{CompatSsoLogins, UserSessions}, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`CompatSsoLoginRepository`] for a PostgreSQL +/// connection +pub struct PgCompatSsoLoginRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgCompatSsoLoginRepository<'c> { + /// Create a new [`PgCompatSsoLoginRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(sqlx::FromRow, Debug)] +#[enum_def] +struct CompatSsoLoginLookup { + compat_sso_login_id: Uuid, + login_token: String, + redirect_uri: String, + created_at: DateTime, + fulfilled_at: Option>, + exchanged_at: Option>, + user_session_id: Option, + compat_session_id: Option, +} + +impl Node for CompatSsoLoginLookup { + fn cursor(&self) -> Ulid { + self.compat_sso_login_id.into() + } +} + +impl TryFrom for CompatSsoLogin { + type Error = DatabaseInconsistencyError; + + fn try_from(res: CompatSsoLoginLookup) -> Result { + let id = res.compat_sso_login_id.into(); + let redirect_uri = Url::parse(&res.redirect_uri).map_err(|e| { + DatabaseInconsistencyError::on("compat_sso_logins") + .column("redirect_uri") + .row(id) + .source(e) + })?; + + let state = match ( + res.fulfilled_at, + res.exchanged_at, + res.user_session_id, + res.compat_session_id, + ) { + (None, None, None, None) => CompatSsoLoginState::Pending, + 
(Some(fulfilled_at), None, Some(browser_session_id), None) => { + CompatSsoLoginState::Fulfilled { + fulfilled_at, + browser_session_id: browser_session_id.into(), + } + } + (Some(fulfilled_at), Some(exchanged_at), _, Some(compat_session_id)) => { + CompatSsoLoginState::Exchanged { + fulfilled_at, + exchanged_at, + compat_session_id: compat_session_id.into(), + } + } + _ => return Err(DatabaseInconsistencyError::on("compat_sso_logins").row(id)), + }; + + Ok(CompatSsoLogin { + id, + login_token: res.login_token, + redirect_uri, + created_at: res.created_at, + state, + }) + } +} + +impl Filter for CompatSsoLoginFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.user().map(|user| { + Expr::exists( + Query::select() + .expr(Expr::cust("1")) + .from(UserSessions::Table) + .and_where( + Expr::col((UserSessions::Table, UserSessions::UserId)) + .eq(Uuid::from(user.id)), + ) + .and_where( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::UserSessionId)) + .equals((UserSessions::Table, UserSessions::UserSessionId)), + ) + .take(), + ) + })) + .add_option(self.state().map(|state| { + if state.is_exchanged() { + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::ExchangedAt)).is_not_null() + } else if state.is_fulfilled() { + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::FulfilledAt)) + .is_not_null() + .and( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::ExchangedAt)) + .is_null(), + ) + } else { + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::FulfilledAt)).is_null() + } + })) + } +} + +#[async_trait] +impl CompatSsoLoginRepository for PgCompatSsoLoginRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.compat_sso_login.lookup", + skip_all, + fields( + db.query.text, + compat_sso_login.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + 
CompatSsoLoginLookup, + r#" + SELECT compat_sso_login_id + , login_token + , redirect_uri + , created_at + , fulfilled_at + , exchanged_at + , compat_session_id + , user_session_id + + FROM compat_sso_logins + WHERE compat_sso_login_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.compat_sso_login.find_for_session", + skip_all, + fields( + db.query.text, + %compat_session.id, + ), + err, + )] + async fn find_for_session( + &mut self, + compat_session: &CompatSession, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatSsoLoginLookup, + r#" + SELECT compat_sso_login_id + , login_token + , redirect_uri + , created_at + , fulfilled_at + , exchanged_at + , compat_session_id + , user_session_id + + FROM compat_sso_logins + WHERE compat_session_id = $1 + "#, + Uuid::from(compat_session.id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.compat_sso_login.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + login_token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + CompatSsoLoginLookup, + r#" + SELECT compat_sso_login_id + , login_token + , redirect_uri + , created_at + , fulfilled_at + , exchanged_at + , compat_session_id + , user_session_id + + FROM compat_sso_logins + WHERE login_token = $1 + "#, + login_token, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.compat_sso_login.add", + skip_all, + fields( + db.query.text, + compat_sso_login.id, + compat_sso_login.redirect_uri = %redirect_uri, + ), + err, + )] + async fn add( + &mut self, + 
rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + login_token: String, + redirect_uri: Url, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("compat_sso_login.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO compat_sso_logins + (compat_sso_login_id, login_token, redirect_uri, created_at) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + &login_token, + redirect_uri.as_str(), + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(CompatSsoLogin { + id, + login_token, + redirect_uri, + created_at, + state: CompatSsoLoginState::default(), + }) + } + + #[tracing::instrument( + name = "db.compat_sso_login.fulfill", + skip_all, + fields( + db.query.text, + %compat_sso_login.id, + %browser_session.id, + user.id = %browser_session.user.id, + ), + err, + )] + async fn fulfill( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + browser_session: &BrowserSession, + ) -> Result { + let fulfilled_at = clock.now(); + let compat_sso_login = compat_sso_login + .fulfill(fulfilled_at, browser_session) + .map_err(DatabaseError::to_invalid_operation)?; + + let res = sqlx::query!( + r#" + UPDATE compat_sso_logins + SET + user_session_id = $2, + fulfilled_at = $3 + WHERE + compat_sso_login_id = $1 + "#, + Uuid::from(compat_sso_login.id), + Uuid::from(browser_session.id), + fulfilled_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(compat_sso_login) + } + + #[tracing::instrument( + name = "db.compat_sso_login.exchange", + skip_all, + fields( + db.query.text, + %compat_sso_login.id, + %compat_session.id, + compat_session.device.id = compat_session.device.as_ref().map(mas_data_model::Device::as_str), + ), + err, + )] + async fn exchange( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + compat_session: &CompatSession, + ) -> Result 
{ + let exchanged_at = clock.now(); + let compat_sso_login = compat_sso_login + .exchange(exchanged_at, compat_session) + .map_err(DatabaseError::to_invalid_operation)?; + + let res = sqlx::query!( + r#" + UPDATE compat_sso_logins + SET + exchanged_at = $2, + compat_session_id = $3 + WHERE + compat_sso_login_id = $1 + "#, + Uuid::from(compat_sso_login.id), + exchanged_at, + Uuid::from(compat_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(compat_sso_login) + } + + #[tracing::instrument( + name = "db.compat_sso_login.list", + skip_all, + fields( + db.query.text, + ), + err + )] + async fn list( + &mut self, + filter: CompatSsoLoginFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId)), + CompatSsoLoginLookupIden::CompatSsoLoginId, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSessionId)), + CompatSsoLoginLookupIden::CompatSessionId, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::UserSessionId)), + CompatSsoLoginLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::LoginToken)), + CompatSsoLoginLookupIden::LoginToken, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::RedirectUri)), + CompatSsoLoginLookupIden::RedirectUri, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CreatedAt)), + CompatSsoLoginLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::FulfilledAt)), + CompatSsoLoginLookupIden::FulfilledAt, + ) + .expr_as( + Expr::col((CompatSsoLogins::Table, CompatSsoLogins::ExchangedAt)), + CompatSsoLoginLookupIden::ExchangedAt, + ) + .from(CompatSsoLogins::Table) + .apply_filter(filter) + .generate_pagination( + (CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId), + 
pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).try_map(TryFrom::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.compat_sso_login.count", + skip_all, + fields( + db.query.text, + ), + err + )] + async fn count(&mut self, filter: CompatSsoLoginFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr(Expr::col((CompatSsoLogins::Table, CompatSsoLogins::CompatSsoLoginId)).count()) + .from(CompatSsoLogins::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/errors.rs b/matrix-authentication-service/crates/storage-pg/src/errors.rs new file mode 100644 index 00000000..4c50557f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/errors.rs @@ -0,0 +1,136 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use sqlx::postgres::PgQueryResult; +use thiserror::Error; +use ulid::Ulid; + +/// Generic error when interacting with the database +#[derive(Debug, Error)] +#[error(transparent)] +pub enum DatabaseError { + /// An error which came from the database itself + Driver { + /// The underlying error from the database driver + #[from] + source: sqlx::Error, + }, + + /// An error which occurred while converting the data from the database + Inconsistency(#[from] DatabaseInconsistencyError), + + /// An error which happened because the requested database operation is + /// invalid + #[error("Invalid database operation")] + InvalidOperation { + /// The source of the error, if any + #[source] + source: Option>, + }, + + /// An error which happens when an operation affects not enough or too many + /// rows + #[error("Expected {expected} rows to be affected, but {actual} rows were affected")] + RowsAffected { + /// How many rows were expected to be affected + expected: u64, + + /// How many rows were actually affected + actual: u64, + }, +} + +impl DatabaseError { + pub(crate) fn ensure_affected_rows( + result: &PgQueryResult, + expected: u64, + ) -> Result<(), DatabaseError> { + let actual = result.rows_affected(); + if actual == expected { + Ok(()) + } else { + Err(DatabaseError::RowsAffected { expected, actual }) + } + } + + pub(crate) fn to_invalid_operation(e: E) -> Self { + Self::InvalidOperation { + source: Some(Box::new(e)), + } + } + + pub(crate) const fn invalid_operation() -> Self { + Self::InvalidOperation { source: None } + } +} + +/// An error which occurred while converting the data from the database +#[derive(Debug, Error)] +pub struct DatabaseInconsistencyError { + /// The table which was being queried + table: &'static str, + + /// The column which was being queried + column: Option<&'static str>, + + /// The row which was being queried + row: Option, + + /// The source of the error + #[source] + source: Option>, +} + +impl std::fmt::Display for 
DatabaseInconsistencyError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Database inconsistency on table {}", self.table)?; + if let Some(column) = self.column { + write!(f, " column {column}")?; + } + if let Some(row) = self.row { + write!(f, " row {row}")?; + } + + Ok(()) + } +} + +impl DatabaseInconsistencyError { + /// Create a new [`DatabaseInconsistencyError`] for the given table + #[must_use] + pub(crate) const fn on(table: &'static str) -> Self { + Self { + table, + column: None, + row: None, + source: None, + } + } + + /// Set the column which was being queried + #[must_use] + pub(crate) const fn column(mut self, column: &'static str) -> Self { + self.column = Some(column); + self + } + + /// Set the row which was being queried + #[must_use] + pub(crate) const fn row(mut self, row: Ulid) -> Self { + self.row = Some(row); + self + } + + /// Give the source of the error + #[must_use] + pub(crate) fn source( + mut self, + source: E, + ) -> Self { + self.source = Some(Box::new(source)); + self + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/filter.rs b/matrix-authentication-service/crates/storage-pg/src/filter.rs new file mode 100644 index 00000000..d8bf3e93 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/filter.rs @@ -0,0 +1,57 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +/// A filter which can be applied to a query +pub(crate) trait Filter { + /// Generate a condition for the filter + /// + /// # Parameters + /// + /// * `has_joins`: Whether the condition has relationship joined or not + fn generate_condition(&self, has_joins: bool) -> impl sea_query::IntoCondition; +} + +pub(crate) trait StatementExt { + /// Apply the filter to the query + /// + /// The query must NOT have any relationship joined + fn apply_filter(&mut self, filter: F) -> &mut Self; +} + +pub(crate) trait StatementWithJoinsExt { + /// Apply the filter to the query + /// + /// The query MUST have any relationship joined + fn apply_filter_with_joins(&mut self, filter: F) -> &mut Self; +} + +impl StatementWithJoinsExt for sea_query::SelectStatement { + fn apply_filter_with_joins(&mut self, filter: F) -> &mut Self { + let condition = filter.generate_condition(true); + self.cond_where(condition) + } +} + +impl StatementExt for sea_query::SelectStatement { + fn apply_filter(&mut self, filter: F) -> &mut Self { + let condition = filter.generate_condition(false); + self.cond_where(condition) + } +} + +impl StatementExt for sea_query::UpdateStatement { + fn apply_filter(&mut self, filter: F) -> &mut Self { + let condition = filter.generate_condition(false); + self.cond_where(condition) + } +} + +impl StatementExt for sea_query::DeleteStatement { + fn apply_filter(&mut self, filter: F) -> &mut Self { + let condition = filter.generate_condition(false); + self.cond_where(condition) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/iden.rs b/matrix-authentication-service/crates/storage-pg/src/iden.rs new file mode 100644 index 00000000..f1af1a5d --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/iden.rs @@ -0,0 +1,210 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Table and column identifiers used by [`sea_query`] + +#[derive(sea_query::Iden)] +pub enum UserSessions { + Table, + UserSessionId, + UserId, + CreatedAt, + FinishedAt, + UserAgent, + LastActiveAt, + LastActiveIp, +} + +#[derive(sea_query::Iden)] +pub enum Users { + Table, + UserId, + Username, + CreatedAt, + LockedAt, + DeactivatedAt, + CanRequestAdmin, + IsGuest, +} + +#[derive(sea_query::Iden)] +pub enum UserEmails { + Table, + UserEmailId, + UserId, + Email, + CreatedAt, +} + +#[derive(sea_query::Iden)] +pub enum CompatSessions { + Table, + CompatSessionId, + UserId, + DeviceId, + HumanName, + UserSessionId, + CreatedAt, + FinishedAt, + IsSynapseAdmin, + UserAgent, + LastActiveAt, + LastActiveIp, +} + +#[derive(sea_query::Iden)] +pub enum CompatSsoLogins { + Table, + CompatSsoLoginId, + RedirectUri, + LoginToken, + CompatSessionId, + UserSessionId, + CreatedAt, + FulfilledAt, + ExchangedAt, +} + +#[derive(sea_query::Iden)] +#[iden = "oauth2_sessions"] +pub enum OAuth2Sessions { + Table, + #[iden = "oauth2_session_id"] + OAuth2SessionId, + UserId, + UserSessionId, + #[iden = "oauth2_client_id"] + OAuth2ClientId, + ScopeList, + CreatedAt, + FinishedAt, + UserAgent, + LastActiveAt, + LastActiveIp, + HumanName, +} + +#[derive(sea_query::Iden)] +#[iden = "oauth2_clients"] +pub enum OAuth2Clients { + Table, + #[iden = "oauth2_client_id"] + OAuth2ClientId, + IsStatic, +} + +#[derive(sea_query::Iden)] +#[iden = "personal_sessions"] +pub enum PersonalSessions { + Table, + PersonalSessionId, + OwnerUserId, + #[iden = "owner_oauth2_client_id"] + OwnerOAuth2ClientId, + ActorUserId, + HumanName, + ScopeList, + CreatedAt, + RevokedAt, + LastActiveAt, + LastActiveIp, +} + +#[derive(sea_query::Iden)] +#[iden = "personal_access_tokens"] +pub enum PersonalAccessTokens { + Table, + PersonalAccessTokenId, + PersonalSessionId, 
+ // AccessTokenSha256, + CreatedAt, + ExpiresAt, + RevokedAt, +} + +#[derive(sea_query::Iden)] +#[iden = "upstream_oauth_providers"] +pub enum UpstreamOAuthProviders { + Table, + #[iden = "upstream_oauth_provider_id"] + UpstreamOAuthProviderId, + Issuer, + HumanName, + BrandName, + Scope, + ClientId, + EncryptedClientSecret, + TokenEndpointSigningAlg, + TokenEndpointAuthMethod, + IdTokenSignedResponseAlg, + FetchUserinfo, + UserinfoSignedResponseAlg, + CreatedAt, + DisabledAt, + ClaimsImports, + DiscoveryMode, + PkceMode, + ResponseMode, + AdditionalParameters, + ForwardLoginHint, + JwksUriOverride, + TokenEndpointOverride, + AuthorizationEndpointOverride, + UserinfoEndpointOverride, + OnBackchannelLogout, +} + +#[derive(sea_query::Iden)] +#[iden = "upstream_oauth_links"] +pub enum UpstreamOAuthLinks { + Table, + #[iden = "upstream_oauth_link_id"] + UpstreamOAuthLinkId, + #[iden = "upstream_oauth_provider_id"] + UpstreamOAuthProviderId, + UserId, + Subject, + HumanAccountName, + CreatedAt, +} + +#[derive(sea_query::Iden)] +#[iden = "upstream_oauth_authorization_sessions"] +pub enum UpstreamOAuthAuthorizationSessions { + Table, + #[iden = "upstream_oauth_authorization_session_id"] + UpstreamOAuthAuthorizationSessionId, + #[iden = "upstream_oauth_provider_id"] + UpstreamOAuthProviderId, + #[iden = "upstream_oauth_link_id"] + UpstreamOAuthLinkId, + State, + CodeChallengeVerifier, + Nonce, + IdToken, + IdTokenClaims, + ExtraCallbackParameters, + Userinfo, + CreatedAt, + CompletedAt, + ConsumedAt, + UnlinkedAt, + UserSessionId, +} + +#[derive(sea_query::Iden)] +pub enum UserRegistrationTokens { + Table, + UserRegistrationTokenId, + Token, + UsageLimit, + TimesUsed, + CreatedAt, + LastUsedAt, + ExpiresAt, + RevokedAt, +} diff --git a/matrix-authentication-service/crates/storage-pg/src/lib.rs b/matrix-authentication-service/crates/storage-pg/src/lib.rs new file mode 100644 index 00000000..ea202947 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage-pg/src/lib.rs @@ -0,0 +1,483 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! An implementation of the storage traits for a PostgreSQL database +//! +//! This backend uses [`sqlx`] to interact with the database. Most queries are +//! type-checked, using introspection data recorded in the `sqlx-data.json` +//! file. This file is generated by the `sqlx` CLI tool, and should be updated +//! whenever the database schema changes, or new queries are added. +//! +//! # Implementing a new repository +//! +//! When a new repository is defined in [`mas_storage`], it should be +//! implemented here, with the PostgreSQL backend. +//! +//! A typical implementation will look like this: +//! +//! ```rust +//! # use async_trait::async_trait; +//! # use ulid::Ulid; +//! # use rand::RngCore; +//! # use mas_data_model::Clock; +//! # use mas_storage_pg::{DatabaseError, ExecuteExt}; +//! # use sqlx::PgConnection; +//! # use uuid::Uuid; +//! # +//! # // A fake data structure, usually defined in mas-data-model +//! # #[derive(sqlx::FromRow)] +//! # struct FakeData { +//! # id: Ulid, +//! # } +//! # +//! # // A fake repository trait, usually defined in mas-storage +//! # #[async_trait] +//! # pub trait FakeDataRepository: Send + Sync { +//! # type Error; +//! # async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; +//! # async fn add( +//! # &mut self, +//! # rng: &mut (dyn RngCore + Send), +//! # clock: &dyn Clock, +//! # ) -> Result; +//! # } +//! # +//! /// An implementation of [`FakeDataRepository`] for a PostgreSQL connection +//! pub struct PgFakeDataRepository<'c> { +//! conn: &'c mut PgConnection, +//! } +//! +//! impl<'c> PgFakeDataRepository<'c> { +//! 
/// Create a new [`FakeDataRepository`] from an active PostgreSQL connection +//! pub fn new(conn: &'c mut PgConnection) -> Self { +//! Self { conn } +//! } +//! } +//! +//! #[derive(sqlx::FromRow)] +//! struct FakeDataLookup { +//! fake_data_id: Uuid, +//! } +//! +//! impl From for FakeData { +//! fn from(value: FakeDataLookup) -> Self { +//! Self { +//! id: value.fake_data_id.into(), +//! } +//! } +//! } +//! +//! #[async_trait] +//! impl<'c> FakeDataRepository for PgFakeDataRepository<'c> { +//! type Error = DatabaseError; +//! +//! #[tracing::instrument( +//! name = "db.fake_data.lookup", +//! skip_all, +//! fields( +//! db.query.text, +//! fake_data.id = %id, +//! ), +//! err, +//! )] +//! async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { +//! // Note: here we would use the macro version instead, but it's not possible here in +//! // this documentation example +//! let res: Option = sqlx::query_as( +//! r#" +//! SELECT fake_data_id +//! FROM fake_data +//! WHERE fake_data_id = $1 +//! "#, +//! ) +//! .bind(Uuid::from(id)) +//! .traced() +//! .fetch_optional(&mut *self.conn) +//! .await?; +//! +//! let Some(res) = res else { return Ok(None) }; +//! +//! Ok(Some(res.into())) +//! } +//! +//! #[tracing::instrument( +//! name = "db.fake_data.add", +//! skip_all, +//! fields( +//! db.query.text, +//! fake_data.id, +//! ), +//! err, +//! )] +//! async fn add( +//! &mut self, +//! rng: &mut (dyn RngCore + Send), +//! clock: &dyn Clock, +//! ) -> Result { +//! let created_at = clock.now(); +//! let id = Ulid::from_datetime_with_source(created_at.into(), rng); +//! tracing::Span::current().record("fake_data.id", tracing::field::display(id)); +//! +//! // Note: here we would use the macro version instead, but it's not possible here in +//! // this documentation example +//! sqlx::query( +//! r#" +//! INSERT INTO fake_data (id) +//! VALUES ($1) +//! "#, +//! ) +//! .bind(Uuid::from(id)) +//! .traced() +//! .execute(&mut *self.conn) +//! .await?; +//! +//! 
Ok(FakeData { +//! id, +//! }) +//! } +//! } +//! ``` +//! +//! A few things to note with the implementation: +//! +//! - All methods are traced, with an explicit, somewhat consistent name. +//! - The SQL statement is included as attribute, by declaring a +//! `db.query.text` attribute on the tracing span, and then calling +//! [`ExecuteExt::traced`]. +//! - The IDs are all [`Ulid`], and generated from the clock and the random +//! number generated passed as parameters. The generated IDs are recorded in +//! the span. +//! - The IDs are stored as [`Uuid`] in PostgreSQL, so conversions are required +//! - "Not found" errors are handled by returning `Ok(None)` instead of an +//! error. +//! +//! [`Ulid`]: ulid::Ulid +//! [`Uuid`]: uuid::Uuid + +#![deny(clippy::future_not_send, missing_docs)] +#![allow(clippy::module_name_repetitions, clippy::blocks_in_conditions)] + +use std::collections::{BTreeMap, BTreeSet, HashSet}; + +use ::tracing::{Instrument, debug, info, info_span, warn}; +use opentelemetry_semantic_conventions::trace::DB_QUERY_TEXT; +use sqlx::{ + Either, PgConnection, + migrate::{AppliedMigration, Migrate, MigrateError, Migration, Migrator}, + postgres::{PgAdvisoryLock, PgAdvisoryLockKey}, +}; + +pub mod app_session; +pub mod compat; +pub mod oauth2; +pub mod personal; +pub mod queue; +pub mod upstream_oauth2; +pub mod user; + +mod errors; +pub(crate) mod filter; +pub(crate) mod iden; +pub(crate) mod pagination; +pub(crate) mod policy_data; +pub(crate) mod repository; +pub(crate) mod telemetry; +pub(crate) mod tracing; + +pub(crate) use self::errors::DatabaseInconsistencyError; +pub use self::{ + errors::DatabaseError, + repository::{PgRepository, PgRepositoryFactory}, + tracing::ExecuteExt, +}; + +/// Embedded migrations in the binary +pub static MIGRATOR: Migrator = sqlx::migrate!(); + +fn available_migrations() -> BTreeMap { + MIGRATOR.iter().map(|m| (m.version, m)).collect() +} + +/// This is the list of migrations we've removed from the migration 
history but +/// might have been applied in the past +#[allow(clippy::inconsistent_digit_grouping)] +const ALLOWED_MISSING_MIGRATIONS: &[i64] = &[ + // https://github.com/matrix-org/matrix-authentication-service/pull/1585 + 20220709_210445, + 20230330_210841, + 20230408_110421, +]; + +fn allowed_missing_migrations() -> BTreeSet { + ALLOWED_MISSING_MIGRATIONS.iter().copied().collect() +} + +/// This is a list of possible additional checksums from previous versions of +/// migrations. The checksum we store in the database is 48 bytes long. We're +/// not really concerned with partial hash collisions, and to avoid this file to +/// be completely unreadable, we only store the upper 16 bytes of that hash. +#[allow(clippy::inconsistent_digit_grouping)] +const ALLOWED_ALTERNATE_CHECKSUMS: &[(i64, u128)] = &[ + // https://github.com/element-hq/matrix-authentication-service/pull/5300 + (20250410_000000, 0x8811_c3ef_dbee_8c00_5b49_25da_5d55_9c3f), + (20250410_000001, 0x7990_37b3_2193_8a5d_c72f_bccd_95fd_82e5), + (20250410_000002, 0xf2b8_f120_deae_27e7_60d0_79a3_0b77_eea3), + (20250410_000003, 0x06be_fc2b_cedc_acf4_b981_02c7_b40c_c469), + (20250410_000004, 0x0a90_9c6a_dba7_545c_10d9_60eb_6d30_2f50), + (20250410_000006, 0xcc7f_5152_6497_5729_d94b_be0d_9c95_8316), + (20250410_000007, 0x12e7_cfab_a017_a5a5_4f2c_18fa_541c_ce62), + (20250410_000008, 0x171d_62e5_ee1a_f0d9_3639_6c5a_277c_54cd), + (20250410_000009, 0xb1a0_93c7_6645_92ad_df45_b395_57bb_a281), + (20250410_000010, 0x8089_86ac_7cff_8d86_2850_d287_cdb1_2b57), + (20250410_000011, 0x8d9d_3fae_02c9_3d3f_81e4_6242_2b39_b5b8), + (20250410_000012, 0x9805_1372_41aa_d5b0_ebe1_ba9d_28c7_faf6), + (20250410_000013, 0x7291_9a97_e4d1_0d45_1791_6e8c_3f2d_e34d), + (20250410_000014, 0x811d_f965_8127_e168_4aa2_f177_a4e6_f077), + (20250410_000015, 0xa639_0780_aab7_d60d_5fcb_771d_13ed_73ee), + (20250410_000016, 0x22b6_e909_6de4_39e3_b2b9_c684_7417_fe07), + (20250410_000017, 0x9dfe_b6d3_89e4_e509_651b_2793_8d8d_cd32), + (20250410_000018, 
0x638f_bdbc_2276_5094_020b_cec1_ab95_c07f), + (20250410_000019, 0xa283_84bc_5fd5_7cbd_b5fb_b5fe_0255_6845), + (20250410_000020, 0x17d1_54b1_7c6e_fc48_61dd_da3d_f8a5_9546), + (20250410_000022, 0xbc36_af82_994a_6f93_8aca_a46b_fc3c_ffde), + (20250410_000023, 0x54ec_3b07_ac79_443b_9e18_a2b3_2d17_5ab9), + (20250410_000024, 0x8ab4_4f80_00b6_58b2_d757_c40f_bc72_3d87), + (20250410_000025, 0x5dc4_2ff3_3042_2f45_046d_10af_ab3a_b583), + (20250410_000026, 0x5263_c547_0b64_6425_5729_48b2_ce84_7cad), + (20250410_000027, 0x0aad_cb50_1d6a_7794_9017_d24d_55e7_1b9d), + (20250410_000028, 0x8fc1_92f8_68df_ca4e_3e2b_cddf_bc12_cffe), + (20250410_000029, 0x416c_9446_b6a3_1b49_2940_a8ac_c1c2_665a), + (20250410_000030, 0x83a5_e51e_25a6_77fb_2b79_6ea5_db1e_364f), + (20250410_000031, 0xfa18_a707_9438_dbc7_2cde_b5f1_ee21_5c7e), + (20250410_000032, 0xd669_662e_8930_838a_b142_c3fa_7b39_d2a0), + (20250410_000033, 0x4019_1053_cabc_191c_c02e_9aa9_407c_0de5), + (20250410_000034, 0xdd59_e595_24e6_4dad_c5f7_fef2_90b8_df57), + (20250410_000035, 0x09b4_ea53_2da4_9c39_eb10_db33_6a6d_608b), + (20250410_000036, 0x3ca5_9c78_8480_e342_d729_907c_d293_2049), + (20250410_000037, 0xc857_2a10_450b_0612_822c_2b86_535a_ea7d), + (20250410_000038, 0x1642_39da_9c3b_d9fd_b1e1_72b1_db78_b978), + (20250410_000039, 0xdd70_b211_6016_bb84_0d84_f04e_eb8a_59d9), + (20250410_000040, 0xe435_ead6_c363_a0b6_e048_dd85_0ecb_9499), + (20250410_000041, 0xe9f3_122f_70d4_9839_c818_4b18_0192_ae26), + (20250410_000043, 0xec5e_1400_483d_c4bf_6014_aba4_ffc3_6236), + (20250410_000044, 0x4750_5eba_4095_6664_78d0_27f9_64bf_64f4), + (20250410_000045, 0x9a53_bd70_4cad_2bf1_61d4_f143_0c82_681d), + (20250410_121612, 0x25f0_9d20_a897_df18_162d_1c47_b68e_81bd), + (20250602_212101, 0xd1a8_782c_b3f0_5045_3f46_49a0_bab0_822b), + (20250708_155857, 0xb78e_6957_a588_c16a_d292_a0c7_cae9_f290), + (20250915_092635, 0x6854_d58b_99d7_3ac5_82f8_25e5_b1c3_cc0b), + (20251127_145951, 0x3bcd_d92e_8391_2a2c_8a18_1d76_354f_96c6), +]; + +fn alternate_checksums_map() 
-> BTreeMap> { + let mut map = BTreeMap::new(); + for (version, checksum) in ALLOWED_ALTERNATE_CHECKSUMS { + map.entry(*version) + .or_insert_with(HashSet::new) + .insert(*checksum); + } + map +} + +/// Load the list of applied migrations into a map. +/// +/// It's important to use a [`BTreeMap`] so that the migrations are naturally +/// ordered by version. +async fn applied_migrations_map( + conn: &mut PgConnection, +) -> Result, MigrateError> { + let applied_migrations = conn + .list_applied_migrations() + .await? + .into_iter() + .map(|m| (m.version, m)) + .collect(); + + Ok(applied_migrations) +} + +/// Checks if the migration table exists +async fn migration_table_exists(conn: &mut PgConnection) -> Result { + sqlx::query_scalar!( + r#" + SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = '_sqlx_migrations' + ) AS "exists!" + "#, + ) + .fetch_one(conn) + .await +} + +/// Run the migrations on the given connection +/// +/// This function acquires an advisory lock on the database to ensure that only +/// one migrator is running at a time. +/// +/// # Errors +/// +/// This function returns an error if the migration fails. +#[::tracing::instrument(name = "db.migrate", skip_all, err)] +pub async fn migrate(conn: &mut PgConnection) -> Result<(), MigrateError> { + // Get the database name and use it to derive an advisory lock key. This + // is the same lock key used by SQLx default migrator, so that it works even + // with older versions of MAS, and when running through `cargo sqlx migrate run` + let database_name = sqlx::query_scalar!(r#"SELECT current_database() as "current_database!""#) + .fetch_one(&mut *conn) + .await + .map_err(MigrateError::from)?; + + let lock = + PgAdvisoryLock::with_key(PgAdvisoryLockKey::BigInt(generate_lock_id(&database_name))); + + // Try to acquire the migration lock in a loop. 
+ // + // The reason we do that with a `try_acquire` is because in Postgres, `CREATE + // INDEX CONCURRENTLY` will *not* complete whilst an advisory lock is being + // acquired on another connection. This then means that if we run two + // migration process at the same time, one of them will go through and block + // on concurrent index creations, because the other will get stuck trying to + // acquire this lock. + // + // To avoid this, we use `try_acquire`/`pg_advisory_lock_try` in a loop, which + // will fail immediately if the lock is held by another connection, allowing + // potential 'CREATE INDEX CONCURRENTLY' statements to complete. + let mut backoff = std::time::Duration::from_millis(250); + let mut conn = conn; + let mut locked_connection = loop { + match lock.try_acquire(conn).await? { + Either::Left(guard) => break guard, + Either::Right(conn_) => { + warn!( + "Another process is already running migrations on the database, waiting {duration}s and trying again…", + duration = backoff.as_secs_f32() + ); + tokio::time::sleep(backoff).await; + backoff = std::cmp::min(backoff * 2, std::time::Duration::from_secs(5)); + conn = conn_; + } + } + }; + + // Creates the migration table if missing + // We check if the table exists before calling `ensure_migrations_table` to + // avoid the pesky 'relation "_sqlx_migrations" already exists, skipping' notice + if !migration_table_exists(locked_connection.as_mut()).await? { + locked_connection.as_mut().ensure_migrations_table().await?; + } + + for migration in pending_migrations(locked_connection.as_mut()).await? 
{ + info!( + "Applying migration {version}: {description}", + version = migration.version, + description = migration.description + ); + locked_connection + .as_mut() + .apply(migration) + .instrument(info_span!( + "db.migrate.run_migration", + db.migration.version = migration.version, + db.migration.description = &*migration.description, + { DB_QUERY_TEXT } = &*migration.sql, + )) + .await?; + } + + locked_connection.release_now().await?; + + Ok(()) +} + +/// Get the list of pending migrations +/// +/// # Errors +/// +/// This function returns an error if there is a problem checking the applied +/// migrations +pub async fn pending_migrations( + conn: &mut PgConnection, +) -> Result, MigrateError> { + // Load the maps of available migrations, applied migrations, migrations that + // are allowed to be missing, alternate checksums for migrations that changed + let available_migrations = available_migrations(); + let allowed_missing = allowed_missing_migrations(); + let alternate_checksums = alternate_checksums_map(); + let applied_migrations = if migration_table_exists(&mut *conn).await? { + applied_migrations_map(&mut *conn).await? + } else { + BTreeMap::new() + }; + + // Check that all applied migrations are still valid + for applied_migration in applied_migrations.values() { + // Check that we know about the applied migration + if let Some(migration) = available_migrations.get(&applied_migration.version) { + // Check the migration checksum + if applied_migration.checksum != migration.checksum { + // The checksum we have in the database doesn't match the one we + // have embedded. 
This might be because a migration was + // intentionally changed, so we check the alternate checksums + if let Some(alternates) = alternate_checksums.get(&applied_migration.version) { + // This converts the first 16 bytes of the checksum into a u128 + let Some(applied_checksum_prefix) = applied_migration + .checksum + .get(..16) + .and_then(|bytes| bytes.try_into().ok()) + .map(u128::from_be_bytes) + else { + return Err(MigrateError::ExecuteMigration( + sqlx::Error::InvalidArgument( + "checksum stored in database is invalid".to_owned(), + ), + applied_migration.version, + )); + }; + + if !alternates.contains(&applied_checksum_prefix) { + warn!( + "The database has a migration applied ({version}) which has known alternative checksums {alternates:x?}, but none of them matched {applied_checksum_prefix:x}", + version = applied_migration.version, + ); + return Err(MigrateError::VersionMismatch(applied_migration.version)); + } + } else { + return Err(MigrateError::VersionMismatch(applied_migration.version)); + } + } + } else if allowed_missing.contains(&applied_migration.version) { + // The migration is missing, but allowed to be missing + debug!( + "The database has a migration applied ({version}) that doesn't exist anymore, but it was intentionally removed", + version = applied_migration.version + ); + } else { + // The migration is missing, warn about it + warn!( + "The database has a migration applied ({version}) that doesn't exist anymore! 
This should not happen, unless rolling back to an older version of MAS.", + version = applied_migration.version + ); + } + } + + Ok(available_migrations + .values() + .copied() + .filter(|migration| { + !migration.migration_type.is_down_migration() + && !applied_migrations.contains_key(&migration.version) + }) + .collect()) +} + +// Copied from the sqlx source code, so that we generate the same lock ID +fn generate_lock_id(database_name: &str) -> i64 { + const CRC_IEEE: crc::Crc = crc::Crc::::new(&crc::CRC_32_ISO_HDLC); + // 0x3d32ad9e chosen by fair dice roll + 0x3d32_ad9e * i64::from(CRC_IEEE.checksum(database_name.as_bytes())) +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/access_token.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/access_token.rs new file mode 100644 index 00000000..a8d49ac3 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/access_token.rs @@ -0,0 +1,366 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::{AccessToken, AccessTokenState, Clock, Session}; +use mas_storage::oauth2::OAuth2AccessTokenRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, tracing::ExecuteExt}; + +/// An implementation of [`OAuth2AccessTokenRepository`] for a PostgreSQL +/// connection +pub struct PgOAuth2AccessTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2AccessTokenRepository<'c> { + /// Create a new [`PgOAuth2AccessTokenRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct OAuth2AccessTokenLookup { + oauth2_access_token_id: Uuid, + oauth2_session_id: Uuid, + access_token: String, + created_at: DateTime, + expires_at: Option>, + revoked_at: Option>, + first_used_at: Option>, +} + +impl From for AccessToken { + fn from(value: OAuth2AccessTokenLookup) -> Self { + let state = match value.revoked_at { + None => AccessTokenState::Valid, + Some(revoked_at) => AccessTokenState::Revoked { revoked_at }, + }; + + Self { + id: value.oauth2_access_token_id.into(), + state, + session_id: value.oauth2_session_id.into(), + access_token: value.access_token, + created_at: value.created_at, + expires_at: value.expires_at, + first_used_at: value.first_used_at, + } + } +} + +#[async_trait] +impl OAuth2AccessTokenRepository for PgOAuth2AccessTokenRepository<'_> { + type Error = DatabaseError; + + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2AccessTokenLookup, + r#" + SELECT oauth2_access_token_id + , access_token + , created_at + , expires_at + , revoked_at + , oauth2_session_id + , first_used_at + + FROM oauth2_access_tokens + + WHERE oauth2_access_token_id = $1 + "#, + Uuid::from(id), + ) + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; 
+ + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2AccessTokenLookup, + r#" + SELECT oauth2_access_token_id + , access_token + , created_at + , expires_at + , revoked_at + , oauth2_session_id + , first_used_at + + FROM oauth2_access_tokens + + WHERE access_token = $1 + "#, + access_token, + ) + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.add", + skip_all, + fields( + db.query.text, + %session.id, + client.id = %session.client_id, + access_token.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: String, + expires_after: Option, + ) -> Result { + let created_at = clock.now(); + let expires_at = expires_after.map(|d| created_at + d); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + + tracing::Span::current().record("access_token.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO oauth2_access_tokens + (oauth2_access_token_id, oauth2_session_id, access_token, created_at, expires_at) + VALUES + ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(session.id), + &access_token, + created_at, + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(AccessToken { + id, + state: AccessTokenState::default(), + access_token, + session_id: session.id, + created_at, + expires_at, + first_used_at: None, + }) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.revoke", + skip_all, + fields( + db.query.text, + session.id = %access_token.session_id, + %access_token.id, + ), + err, + )] + async fn revoke( + &mut self, + clock: &dyn Clock, + 
access_token: AccessToken, + ) -> Result { + let revoked_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_access_tokens + SET revoked_at = $2 + WHERE oauth2_access_token_id = $1 + "#, + Uuid::from(access_token.id), + revoked_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + access_token + .revoke(revoked_at) + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.mark_used", + skip_all, + fields( + db.query.text, + session.id = %access_token.session_id, + %access_token.id, + ), + err, + )] + async fn mark_used( + &mut self, + clock: &dyn Clock, + mut access_token: AccessToken, + ) -> Result { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_access_tokens + SET first_used_at = $2 + WHERE oauth2_access_token_id = $1 + "#, + Uuid::from(access_token.id), + now, + ) + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + access_token.first_used_at = Some(now); + + Ok(access_token) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.cleanup_revoked", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_revoked( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT oauth2_access_token_id + FROM oauth2_access_tokens + WHERE revoked_at IS NOT NULL + AND ($1::timestamptz IS NULL OR revoked_at >= $1::timestamptz) + AND revoked_at < $2::timestamptz + ORDER BY revoked_at ASC + LIMIT $3 + FOR UPDATE + ), + + deleted AS ( + DELETE FROM oauth2_access_tokens + USING to_delete + WHERE oauth2_access_tokens.oauth2_access_token_id = to_delete.oauth2_access_token_id + RETURNING oauth2_access_tokens.revoked_at + ) + + SELECT + COUNT(*) as "count!", + 
MAX(revoked_at) as last_revoked_at + FROM deleted + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_revoked_at, + )) + } + + #[tracing::instrument( + name = "db.oauth2_access_token.cleanup_expired", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_expired( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT oauth2_access_token_id + FROM oauth2_access_tokens + WHERE expires_at IS NOT NULL + AND ($1::timestamptz IS NULL OR expires_at >= $1::timestamptz) + AND expires_at < $2::timestamptz + ORDER BY expires_at ASC + LIMIT $3 + FOR UPDATE + ), + + deleted AS ( + DELETE FROM oauth2_access_tokens + USING to_delete + WHERE oauth2_access_tokens.oauth2_access_token_id = to_delete.oauth2_access_token_id + RETURNING oauth2_access_tokens.expires_at + ) + + SELECT + COUNT(*) as "count!", + MAX(expires_at) as last_expires_at + FROM deleted + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_expires_at, + )) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/authorization_grant.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/authorization_grant.rs new file mode 100644 index 00000000..827a930a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/authorization_grant.rs @@ -0,0 +1,502 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + AuthorizationCode, AuthorizationGrant, AuthorizationGrantStage, Client, Clock, Pkce, Session, +}; +use mas_iana::oauth::PkceCodeChallengeMethod; +use mas_storage::oauth2::OAuth2AuthorizationGrantRepository; +use oauth2_types::{requests::ResponseMode, scope::Scope}; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, tracing::ExecuteExt}; + +/// An implementation of [`OAuth2AuthorizationGrantRepository`] for a PostgreSQL +/// connection +pub struct PgOAuth2AuthorizationGrantRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2AuthorizationGrantRepository<'c> { + /// Create a new [`PgOAuth2AuthorizationGrantRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[allow(clippy::struct_excessive_bools)] +struct GrantLookup { + oauth2_authorization_grant_id: Uuid, + created_at: DateTime, + cancelled_at: Option>, + fulfilled_at: Option>, + exchanged_at: Option>, + scope: String, + state: Option, + nonce: Option, + redirect_uri: String, + response_mode: String, + response_type_code: bool, + response_type_id_token: bool, + authorization_code: Option, + code_challenge: Option, + code_challenge_method: Option, + login_hint: Option, + locale: Option, + oauth2_client_id: Uuid, + oauth2_session_id: Option, +} + +impl TryFrom for AuthorizationGrant { + type Error = DatabaseInconsistencyError; + + fn try_from(value: GrantLookup) -> Result { + let id = value.oauth2_authorization_grant_id.into(); + let scope: Scope = value.scope.parse().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("scope") + .row(id) + .source(e) + 
})?; + + let stage = match ( + value.fulfilled_at, + value.exchanged_at, + value.cancelled_at, + value.oauth2_session_id, + ) { + (None, None, None, None) => AuthorizationGrantStage::Pending, + (Some(fulfilled_at), None, None, Some(session_id)) => { + AuthorizationGrantStage::Fulfilled { + session_id: session_id.into(), + fulfilled_at, + } + } + (Some(fulfilled_at), Some(exchanged_at), None, Some(session_id)) => { + AuthorizationGrantStage::Exchanged { + session_id: session_id.into(), + fulfilled_at, + exchanged_at, + } + } + (None, None, Some(cancelled_at), None) => { + AuthorizationGrantStage::Cancelled { cancelled_at } + } + _ => { + return Err( + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("stage") + .row(id), + ); + } + }; + + let pkce = match (value.code_challenge, value.code_challenge_method) { + (Some(challenge), Some(challenge_method)) if challenge_method == "plain" => { + Some(Pkce { + challenge_method: PkceCodeChallengeMethod::Plain, + challenge, + }) + } + (Some(challenge), Some(challenge_method)) if challenge_method == "S256" => Some(Pkce { + challenge_method: PkceCodeChallengeMethod::S256, + challenge, + }), + (None, None) => None, + _ => { + return Err( + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("code_challenge_method") + .row(id), + ); + } + }; + + let code: Option = + match (value.response_type_code, value.authorization_code, pkce) { + (false, None, None) => None, + (true, Some(code), pkce) => Some(AuthorizationCode { code, pkce }), + _ => { + return Err( + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("authorization_code") + .row(id), + ); + } + }; + + let redirect_uri = value.redirect_uri.parse().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("redirect_uri") + .row(id) + .source(e) + })?; + + let response_mode = value.response_mode.parse().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_authorization_grants") + 
.column("response_mode") + .row(id) + .source(e) + })?; + + Ok(AuthorizationGrant { + id, + stage, + client_id: value.oauth2_client_id.into(), + code, + scope, + state: value.state, + nonce: value.nonce, + response_mode, + redirect_uri, + created_at: value.created_at, + response_type_id_token: value.response_type_id_token, + login_hint: value.login_hint, + locale: value.locale, + }) + } +} + +#[async_trait] +impl OAuth2AuthorizationGrantRepository for PgOAuth2AuthorizationGrantRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.add", + skip_all, + fields( + db.query.text, + grant.id, + grant.scope = %scope, + %client.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + redirect_uri: Url, + scope: Scope, + code: Option, + state: Option, + nonce: Option, + response_mode: ResponseMode, + response_type_id_token: bool, + login_hint: Option, + locale: Option, + ) -> Result { + let code_challenge = code + .as_ref() + .and_then(|c| c.pkce.as_ref()) + .map(|p| &p.challenge); + let code_challenge_method = code + .as_ref() + .and_then(|c| c.pkce.as_ref()) + .map(|p| p.challenge_method.to_string()); + let code_str = code.as_ref().map(|c| &c.code); + + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("grant.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO oauth2_authorization_grants ( + oauth2_authorization_grant_id, + oauth2_client_id, + redirect_uri, + scope, + state, + nonce, + response_mode, + code_challenge, + code_challenge_method, + response_type_code, + response_type_id_token, + authorization_code, + login_hint, + locale, + created_at + ) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) + "#, + Uuid::from(id), + Uuid::from(client.id), + redirect_uri.to_string(), + scope.to_string(), + state, + nonce, + 
response_mode.to_string(), + code_challenge, + code_challenge_method, + code.is_some(), + response_type_id_token, + code_str, + login_hint, + locale, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(AuthorizationGrant { + id, + stage: AuthorizationGrantStage::Pending, + code, + redirect_uri, + client_id: client.id, + scope, + state, + nonce, + response_mode, + created_at, + response_type_id_token, + login_hint, + locale, + }) + } + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.lookup", + skip_all, + fields( + db.query.text, + grant.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + GrantLookup, + r#" + SELECT oauth2_authorization_grant_id + , created_at + , cancelled_at + , fulfilled_at + , exchanged_at + , scope + , state + , redirect_uri + , response_mode + , nonce + , oauth2_client_id + , authorization_code + , response_type_code + , response_type_id_token + , code_challenge + , code_challenge_method + , login_hint + , locale + , oauth2_session_id + FROM + oauth2_authorization_grants + + WHERE oauth2_authorization_grant_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.find_by_code", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_code( + &mut self, + code: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + GrantLookup, + r#" + SELECT oauth2_authorization_grant_id + , created_at + , cancelled_at + , fulfilled_at + , exchanged_at + , scope + , state + , redirect_uri + , response_mode + , nonce + , oauth2_client_id + , authorization_code + , response_type_code + , response_type_id_token + , code_challenge + , code_challenge_method + , login_hint + , locale + , oauth2_session_id + FROM + oauth2_authorization_grants 
+ + WHERE authorization_code = $1 + "#, + code, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.fulfill", + skip_all, + fields( + db.query.text, + %grant.id, + client.id = %grant.client_id, + %session.id, + ), + err, + )] + async fn fulfill( + &mut self, + clock: &dyn Clock, + session: &Session, + grant: AuthorizationGrant, + ) -> Result { + let fulfilled_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_authorization_grants + SET fulfilled_at = $2 + , oauth2_session_id = $3 + WHERE oauth2_authorization_grant_id = $1 + "#, + Uuid::from(grant.id), + fulfilled_at, + Uuid::from(session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + // XXX: check affected rows & new methods + let grant = grant + .fulfill(fulfilled_at, session) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(grant) + } + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.exchange", + skip_all, + fields( + db.query.text, + %grant.id, + client.id = %grant.client_id, + ), + err, + )] + async fn exchange( + &mut self, + clock: &dyn Clock, + grant: AuthorizationGrant, + ) -> Result { + let exchanged_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_authorization_grants + SET exchanged_at = $2 + WHERE oauth2_authorization_grant_id = $1 + "#, + Uuid::from(grant.id), + exchanged_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + let grant = grant + .exchange(exchanged_at) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(grant) + } + + #[tracing::instrument( + name = "db.oauth2_authorization_grant.cleanup", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn 
cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // `MAX(uuid)` isn't a thing in Postgres, so we can't just re-select the + // deleted rows and do a MAX on the `oauth2_authorization_grant_id`. + // Instead, we do the aggregation on the client side, which is a little + // less efficient, but good enough. + let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT oauth2_authorization_grant_id + FROM oauth2_authorization_grants + WHERE ($1::uuid IS NULL OR oauth2_authorization_grant_id > $1) + AND oauth2_authorization_grant_id <= $2 + ORDER BY oauth2_authorization_grant_id + LIMIT $3 + ) + DELETE FROM oauth2_authorization_grants + USING to_delete + WHERE oauth2_authorization_grants.oauth2_authorization_grant_id = to_delete.oauth2_authorization_grant_id + RETURNING oauth2_authorization_grants.oauth2_authorization_grant_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/client.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/client.rs new file mode 100644 index 00000000..bdfa6014 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/client.rs @@ -0,0 +1,851 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{ + collections::{BTreeMap, BTreeSet}, + string::ToString, +}; + +use async_trait::async_trait; +use mas_data_model::{Client, Clock, JwksOrJwksUri}; +use mas_iana::{jose::JsonWebSignatureAlg, oauth::OAuthClientAuthenticationMethod}; +use mas_jose::jwk::PublicJsonWebKeySet; +use mas_storage::oauth2::OAuth2ClientRepository; +use oauth2_types::{oidc::ApplicationType, requests::GrantType}; +use opentelemetry_semantic_conventions::attribute::DB_QUERY_TEXT; +use rand::RngCore; +use sqlx::PgConnection; +use tracing::{Instrument, info_span}; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, tracing::ExecuteExt}; + +/// An implementation of [`OAuth2ClientRepository`] for a PostgreSQL connection +pub struct PgOAuth2ClientRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2ClientRepository<'c> { + /// Create a new [`PgOAuth2ClientRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[allow(clippy::struct_excessive_bools)] +#[derive(Debug)] +struct OAuth2ClientLookup { + oauth2_client_id: Uuid, + metadata_digest: Option, + encrypted_client_secret: Option, + application_type: Option, + redirect_uris: Vec, + grant_type_authorization_code: bool, + grant_type_refresh_token: bool, + grant_type_client_credentials: bool, + grant_type_device_code: bool, + client_name: Option, + logo_uri: Option, + client_uri: Option, + policy_uri: Option, + tos_uri: Option, + jwks_uri: Option, + jwks: Option, + id_token_signed_response_alg: Option, + userinfo_signed_response_alg: Option, + token_endpoint_auth_method: Option, + token_endpoint_auth_signing_alg: Option, + initiate_login_uri: Option, +} + +impl TryInto for OAuth2ClientLookup { + type Error = DatabaseInconsistencyError; + + fn try_into(self) -> Result { + let id = Ulid::from(self.oauth2_client_id); + + let redirect_uris: Result, _> = + self.redirect_uris.iter().map(|s| 
s.parse()).collect(); + let redirect_uris = redirect_uris.map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("redirect_uris") + .row(id) + .source(e) + })?; + + let application_type = self + .application_type + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("application_type") + .row(id) + .source(e) + })?; + + let mut grant_types = Vec::new(); + if self.grant_type_authorization_code { + grant_types.push(GrantType::AuthorizationCode); + } + if self.grant_type_refresh_token { + grant_types.push(GrantType::RefreshToken); + } + if self.grant_type_client_credentials { + grant_types.push(GrantType::ClientCredentials); + } + if self.grant_type_device_code { + grant_types.push(GrantType::DeviceCode); + } + + let logo_uri = self.logo_uri.map(|s| s.parse()).transpose().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("logo_uri") + .row(id) + .source(e) + })?; + + let client_uri = self + .client_uri + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("client_uri") + .row(id) + .source(e) + })?; + + let policy_uri = self + .policy_uri + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("policy_uri") + .row(id) + .source(e) + })?; + + let tos_uri = self.tos_uri.map(|s| s.parse()).transpose().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("tos_uri") + .row(id) + .source(e) + })?; + + let id_token_signed_response_alg = self + .id_token_signed_response_alg + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("id_token_signed_response_alg") + .row(id) + .source(e) + })?; + + let userinfo_signed_response_alg = self + .userinfo_signed_response_alg + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + 
.column("userinfo_signed_response_alg") + .row(id) + .source(e) + })?; + + let token_endpoint_auth_method = self + .token_endpoint_auth_method + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("token_endpoint_auth_method") + .row(id) + .source(e) + })?; + + let token_endpoint_auth_signing_alg = self + .token_endpoint_auth_signing_alg + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("token_endpoint_auth_signing_alg") + .row(id) + .source(e) + })?; + + let initiate_login_uri = self + .initiate_login_uri + .map(|s| s.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("initiate_login_uri") + .row(id) + .source(e) + })?; + + let jwks = match (self.jwks, self.jwks_uri) { + (None, None) => None, + (Some(jwks), None) => { + let jwks = serde_json::from_value(jwks).map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("jwks") + .row(id) + .source(e) + })?; + Some(JwksOrJwksUri::Jwks(jwks)) + } + (None, Some(jwks_uri)) => { + let jwks_uri = jwks_uri.parse().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_clients") + .column("jwks_uri") + .row(id) + .source(e) + })?; + + Some(JwksOrJwksUri::JwksUri(jwks_uri)) + } + _ => { + return Err(DatabaseInconsistencyError::on("oauth2_clients") + .column("jwks(_uri)") + .row(id)); + } + }; + + Ok(Client { + id, + client_id: id.to_string(), + metadata_digest: self.metadata_digest, + encrypted_client_secret: self.encrypted_client_secret, + application_type, + redirect_uris, + grant_types, + client_name: self.client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + jwks, + id_token_signed_response_alg, + userinfo_signed_response_alg, + token_endpoint_auth_method, + token_endpoint_auth_signing_alg, + initiate_login_uri, + }) + } +} + +#[async_trait] +impl OAuth2ClientRepository for PgOAuth2ClientRepository<'_> { + type Error = 
DatabaseError; + + #[tracing::instrument( + name = "db.oauth2_client.lookup", + skip_all, + fields( + db.query.text, + oauth2_client.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2ClientLookup, + r#" + SELECT oauth2_client_id + , metadata_digest + , encrypted_client_secret + , application_type + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , client_name + , logo_uri + , client_uri + , policy_uri + , tos_uri + , jwks_uri + , jwks + , id_token_signed_response_alg + , userinfo_signed_response_alg + , token_endpoint_auth_method + , token_endpoint_auth_signing_alg + , initiate_login_uri + FROM oauth2_clients c + + WHERE oauth2_client_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_client.find_by_metadata_digest", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_metadata_digest( + &mut self, + digest: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2ClientLookup, + r#" + SELECT oauth2_client_id + , metadata_digest + , encrypted_client_secret + , application_type + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , client_name + , logo_uri + , client_uri + , policy_uri + , tos_uri + , jwks_uri + , jwks + , id_token_signed_response_alg + , userinfo_signed_response_alg + , token_endpoint_auth_method + , token_endpoint_auth_signing_alg + , initiate_login_uri + FROM oauth2_clients + WHERE metadata_digest = $1 + "#, + digest, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + 
#[tracing::instrument( + name = "db.oauth2_client.load_batch", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn load_batch( + &mut self, + ids: BTreeSet, + ) -> Result, Self::Error> { + let ids: Vec = ids.into_iter().map(Uuid::from).collect(); + let res = sqlx::query_as!( + OAuth2ClientLookup, + r#" + SELECT oauth2_client_id + , metadata_digest + , encrypted_client_secret + , application_type + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , client_name + , logo_uri + , client_uri + , policy_uri + , tos_uri + , jwks_uri + , jwks + , id_token_signed_response_alg + , userinfo_signed_response_alg + , token_endpoint_auth_method + , token_endpoint_auth_signing_alg + , initiate_login_uri + FROM oauth2_clients c + + WHERE oauth2_client_id = ANY($1::uuid[]) + "#, + &ids, + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + res.into_iter() + .map(|r| { + r.try_into() + .map(|c: Client| (c.id, c)) + .map_err(DatabaseError::from) + }) + .collect() + } + + #[tracing::instrument( + name = "db.oauth2_client.add", + skip_all, + fields( + db.query.text, + client.id, + client.name = client_name + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + redirect_uris: Vec, + metadata_digest: Option, + encrypted_client_secret: Option, + application_type: Option, + grant_types: Vec, + client_name: Option, + logo_uri: Option, + client_uri: Option, + policy_uri: Option, + tos_uri: Option, + jwks_uri: Option, + jwks: Option, + id_token_signed_response_alg: Option, + userinfo_signed_response_alg: Option, + token_endpoint_auth_method: Option, + token_endpoint_auth_signing_alg: Option, + initiate_login_uri: Option, + ) -> Result { + let now = clock.now(); + let id = Ulid::from_datetime_with_source(now.into(), rng); + tracing::Span::current().record("client.id", tracing::field::display(id)); + + let jwks_json = jwks + .as_ref() 
+ .map(serde_json::to_value) + .transpose() + .map_err(DatabaseError::to_invalid_operation)?; + + let redirect_uris_array = redirect_uris.iter().map(Url::to_string).collect::>(); + + sqlx::query!( + r#" + INSERT INTO oauth2_clients + ( oauth2_client_id + , metadata_digest + , encrypted_client_secret + , application_type + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , client_name + , logo_uri + , client_uri + , policy_uri + , tos_uri + , jwks_uri + , jwks + , id_token_signed_response_alg + , userinfo_signed_response_alg + , token_endpoint_auth_method + , token_endpoint_auth_signing_alg + , initiate_login_uri + , is_static + ) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, + $14, $15, $16, $17, $18, $19, $20, $21, FALSE) + "#, + Uuid::from(id), + metadata_digest, + encrypted_client_secret, + application_type.as_ref().map(ToString::to_string), + &redirect_uris_array, + grant_types.contains(&GrantType::AuthorizationCode), + grant_types.contains(&GrantType::RefreshToken), + grant_types.contains(&GrantType::ClientCredentials), + grant_types.contains(&GrantType::DeviceCode), + client_name, + logo_uri.as_ref().map(Url::as_str), + client_uri.as_ref().map(Url::as_str), + policy_uri.as_ref().map(Url::as_str), + tos_uri.as_ref().map(Url::as_str), + jwks_uri.as_ref().map(Url::as_str), + jwks_json, + id_token_signed_response_alg + .as_ref() + .map(ToString::to_string), + userinfo_signed_response_alg + .as_ref() + .map(ToString::to_string), + token_endpoint_auth_method.as_ref().map(ToString::to_string), + token_endpoint_auth_signing_alg + .as_ref() + .map(ToString::to_string), + initiate_login_uri.as_ref().map(Url::as_str), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let jwks = match (jwks, jwks_uri) { + (None, None) => None, + (Some(jwks), None) => Some(JwksOrJwksUri::Jwks(jwks)), + (None, Some(jwks_uri)) => Some(JwksOrJwksUri::JwksUri(jwks_uri)), 
+ _ => return Err(DatabaseError::invalid_operation()), + }; + + Ok(Client { + id, + client_id: id.to_string(), + metadata_digest: None, + encrypted_client_secret, + application_type, + redirect_uris, + grant_types, + client_name, + logo_uri, + client_uri, + policy_uri, + tos_uri, + jwks, + id_token_signed_response_alg, + userinfo_signed_response_alg, + token_endpoint_auth_method, + token_endpoint_auth_signing_alg, + initiate_login_uri, + }) + } + + #[tracing::instrument( + name = "db.oauth2_client.upsert_static", + skip_all, + fields( + db.query.text, + client.id = %client_id, + ), + err, + )] + async fn upsert_static( + &mut self, + client_id: Ulid, + client_name: Option, + client_auth_method: OAuthClientAuthenticationMethod, + encrypted_client_secret: Option, + jwks: Option, + jwks_uri: Option, + redirect_uris: Vec, + ) -> Result { + let jwks_json = jwks + .as_ref() + .map(serde_json::to_value) + .transpose() + .map_err(DatabaseError::to_invalid_operation)?; + + let client_auth_method = client_auth_method.to_string(); + let redirect_uris_array = redirect_uris.iter().map(Url::to_string).collect::>(); + + sqlx::query!( + r#" + INSERT INTO oauth2_clients + ( oauth2_client_id + , encrypted_client_secret + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , token_endpoint_auth_method + , jwks + , client_name + , jwks_uri + , is_static + ) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, TRUE) + ON CONFLICT (oauth2_client_id) + DO + UPDATE SET encrypted_client_secret = EXCLUDED.encrypted_client_secret + , redirect_uris = EXCLUDED.redirect_uris + , grant_type_authorization_code = EXCLUDED.grant_type_authorization_code + , grant_type_refresh_token = EXCLUDED.grant_type_refresh_token + , grant_type_client_credentials = EXCLUDED.grant_type_client_credentials + , grant_type_device_code = EXCLUDED.grant_type_device_code + , token_endpoint_auth_method = 
EXCLUDED.token_endpoint_auth_method + , jwks = EXCLUDED.jwks + , client_name = EXCLUDED.client_name + , jwks_uri = EXCLUDED.jwks_uri + , is_static = TRUE + "#, + Uuid::from(client_id), + encrypted_client_secret, + &redirect_uris_array, + true, + true, + true, + true, + client_auth_method, + jwks_json, + client_name, + jwks_uri.as_ref().map(Url::as_str), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let jwks = match (jwks, jwks_uri) { + (None, None) => None, + (Some(jwks), None) => Some(JwksOrJwksUri::Jwks(jwks)), + (None, Some(jwks_uri)) => Some(JwksOrJwksUri::JwksUri(jwks_uri)), + _ => return Err(DatabaseError::invalid_operation()), + }; + + Ok(Client { + id: client_id, + client_id: client_id.to_string(), + metadata_digest: None, + encrypted_client_secret, + application_type: None, + redirect_uris, + grant_types: vec![ + GrantType::AuthorizationCode, + GrantType::RefreshToken, + GrantType::ClientCredentials, + ], + client_name, + logo_uri: None, + client_uri: None, + policy_uri: None, + tos_uri: None, + jwks, + id_token_signed_response_alg: None, + userinfo_signed_response_alg: None, + token_endpoint_auth_method: None, + token_endpoint_auth_signing_alg: None, + initiate_login_uri: None, + }) + } + + #[tracing::instrument( + name = "db.oauth2_client.all_static", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn all_static(&mut self) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2ClientLookup, + r#" + SELECT oauth2_client_id + , metadata_digest + , encrypted_client_secret + , application_type + , redirect_uris + , grant_type_authorization_code + , grant_type_refresh_token + , grant_type_client_credentials + , grant_type_device_code + , client_name + , logo_uri + , client_uri + , policy_uri + , tos_uri + , jwks_uri + , jwks + , id_token_signed_response_alg + , userinfo_signed_response_alg + , token_endpoint_auth_method + , token_endpoint_auth_signing_alg + , initiate_login_uri + FROM oauth2_clients c + WHERE is_static = 
TRUE + "#, + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + res.into_iter() + .map(|r| r.try_into().map_err(DatabaseError::from)) + .collect() + } + + #[tracing::instrument( + name = "db.oauth2_client.delete_by_id", + skip_all, + fields( + db.query.text, + client.id = %id, + ), + err, + )] + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error> { + // Delete the authorization grants + { + let span = info_span!( + "db.oauth2_client.delete_by_id.authorization_grants", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM oauth2_authorization_grants + WHERE oauth2_client_id = $1 + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + // Delete the OAuth 2 sessions related data + { + let span = info_span!( + "db.oauth2_client.delete_by_id.access_tokens", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM oauth2_access_tokens + WHERE oauth2_session_id IN ( + SELECT oauth2_session_id + FROM oauth2_sessions + WHERE oauth2_client_id = $1 + ) + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + { + let span = info_span!( + "db.oauth2_client.delete_by_id.refresh_tokens", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM oauth2_refresh_tokens + WHERE oauth2_session_id IN ( + SELECT oauth2_session_id + FROM oauth2_sessions + WHERE oauth2_client_id = $1 + ) + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + { + let span = info_span!( + "db.oauth2_client.delete_by_id.sessions", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM oauth2_sessions + WHERE oauth2_client_id = $1 + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + // Delete any personal access tokens & sessions owned 
+ // by the client + { + let span = info_span!( + "db.oauth2_client.delete_by_id.personal_access_tokens", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM personal_access_tokens + WHERE personal_session_id IN ( + SELECT personal_session_id + FROM personal_sessions + WHERE owner_oauth2_client_id = $1 + ) + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + { + let span = info_span!( + "db.oauth2_client.delete_by_id.personal_sessions", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + DELETE FROM personal_sessions + WHERE owner_oauth2_client_id = $1 + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + // Now delete the client itself + let res = sqlx::query!( + r#" + DELETE FROM oauth2_clients + WHERE oauth2_client_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/device_code_grant.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/device_code_grant.rs new file mode 100644 index 00000000..9d2767bd --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/device_code_grant.rs @@ -0,0 +1,523 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Clock, DeviceCodeGrant, DeviceCodeGrantState, Session}; +use mas_storage::oauth2::{OAuth2DeviceCodeGrantParams, OAuth2DeviceCodeGrantRepository}; +use oauth2_types::scope::Scope; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, ExecuteExt, errors::DatabaseInconsistencyError}; + +/// An implementation of [`OAuth2DeviceCodeGrantRepository`] for a PostgreSQL +/// connection +pub struct PgOAuth2DeviceCodeGrantRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2DeviceCodeGrantRepository<'c> { + /// Create a new [`PgOAuth2DeviceCodeGrantRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct OAuth2DeviceGrantLookup { + oauth2_device_code_grant_id: Uuid, + oauth2_client_id: Uuid, + scope: String, + device_code: String, + user_code: String, + created_at: DateTime, + expires_at: DateTime, + fulfilled_at: Option>, + rejected_at: Option>, + exchanged_at: Option>, + user_session_id: Option, + oauth2_session_id: Option, + ip_address: Option, + user_agent: Option, +} + +impl TryFrom for DeviceCodeGrant { + type Error = DatabaseInconsistencyError; + + fn try_from( + OAuth2DeviceGrantLookup { + oauth2_device_code_grant_id, + oauth2_client_id, + scope, + device_code, + user_code, + created_at, + expires_at, + fulfilled_at, + rejected_at, + exchanged_at, + user_session_id, + oauth2_session_id, + ip_address, + user_agent, + }: OAuth2DeviceGrantLookup, + ) -> Result { + let id = Ulid::from(oauth2_device_code_grant_id); + let client_id = Ulid::from(oauth2_client_id); + + let scope: Scope = scope.parse().map_err(|e| { + DatabaseInconsistencyError::on("oauth2_authorization_grants") + .column("scope") + .row(id) + .source(e) + })?; + + let state = match ( + fulfilled_at, + rejected_at, + exchanged_at, + 
user_session_id, + oauth2_session_id, + ) { + (None, None, None, None, None) => DeviceCodeGrantState::Pending, + + (Some(fulfilled_at), None, None, Some(user_session_id), None) => { + DeviceCodeGrantState::Fulfilled { + browser_session_id: Ulid::from(user_session_id), + fulfilled_at, + } + } + + (None, Some(rejected_at), None, Some(user_session_id), None) => { + DeviceCodeGrantState::Rejected { + browser_session_id: Ulid::from(user_session_id), + rejected_at, + } + } + + ( + Some(fulfilled_at), + None, + Some(exchanged_at), + Some(user_session_id), + Some(oauth2_session_id), + ) => DeviceCodeGrantState::Exchanged { + browser_session_id: Ulid::from(user_session_id), + session_id: Ulid::from(oauth2_session_id), + fulfilled_at, + exchanged_at, + }, + + _ => return Err(DatabaseInconsistencyError::on("oauth2_device_code_grant").row(id)), + }; + + Ok(DeviceCodeGrant { + id, + state, + client_id, + scope, + user_code, + device_code, + created_at, + expires_at, + ip_address, + user_agent, + }) + } +} + +#[async_trait] +impl OAuth2DeviceCodeGrantRepository for PgOAuth2DeviceCodeGrantRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.add", + skip_all, + fields( + db.query.text, + oauth2_device_code.id, + oauth2_device_code.scope = %params.scope, + oauth2_client.id = %params.client.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: OAuth2DeviceCodeGrantParams<'_>, + ) -> Result { + let now = clock.now(); + let id = Ulid::from_datetime_with_source(now.into(), rng); + tracing::Span::current().record("oauth2_device_code.id", tracing::field::display(id)); + + let created_at = now; + let expires_at = now + params.expires_in; + let client_id = params.client.id; + + sqlx::query!( + r#" + INSERT INTO "oauth2_device_code_grant" + ( oauth2_device_code_grant_id + , oauth2_client_id + , scope + , device_code + , user_code + , created_at + , expires_at + , 
ip_address + , user_agent + ) + VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9) + "#, + Uuid::from(id), + Uuid::from(client_id), + params.scope.to_string(), + ¶ms.device_code, + ¶ms.user_code, + created_at, + expires_at, + params.ip_address as Option, + params.user_agent.as_deref(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(DeviceCodeGrant { + id, + state: DeviceCodeGrantState::Pending, + client_id, + scope: params.scope, + user_code: params.user_code, + device_code: params.device_code, + created_at, + expires_at, + ip_address: params.ip_address, + user_agent: params.user_agent, + }) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.lookup", + skip_all, + fields( + db.query.text, + oauth2_device_code.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2DeviceGrantLookup, + r#" + SELECT oauth2_device_code_grant_id + , oauth2_client_id + , scope + , device_code + , user_code + , created_at + , expires_at + , fulfilled_at + , rejected_at + , exchanged_at + , user_session_id + , oauth2_session_id + , ip_address as "ip_address: IpAddr" + , user_agent + FROM + oauth2_device_code_grant + + WHERE oauth2_device_code_grant_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.find_by_user_code", + skip_all, + fields( + db.query.text, + oauth2_device_code.user_code = %user_code, + ), + err, + )] + async fn find_by_user_code( + &mut self, + user_code: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2DeviceGrantLookup, + r#" + SELECT oauth2_device_code_grant_id + , oauth2_client_id + , scope + , device_code + , user_code + , created_at + , expires_at + , fulfilled_at + , rejected_at + , exchanged_at + , user_session_id + , oauth2_session_id + , ip_address as 
"ip_address: IpAddr" + , user_agent + FROM + oauth2_device_code_grant + + WHERE user_code = $1 + "#, + user_code, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.find_by_device_code", + skip_all, + fields( + db.query.text, + oauth2_device_code.device_code = %device_code, + ), + err, + )] + async fn find_by_device_code( + &mut self, + device_code: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2DeviceGrantLookup, + r#" + SELECT oauth2_device_code_grant_id + , oauth2_client_id + , scope + , device_code + , user_code + , created_at + , expires_at + , fulfilled_at + , rejected_at + , exchanged_at + , user_session_id + , oauth2_session_id + , ip_address as "ip_address: IpAddr" + , user_agent + FROM + oauth2_device_code_grant + + WHERE device_code = $1 + "#, + device_code, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.fulfill", + skip_all, + fields( + db.query.text, + oauth2_device_code.id = %device_code_grant.id, + oauth2_client.id = %device_code_grant.client_id, + browser_session.id = %browser_session.id, + user.id = %browser_session.user.id, + ), + err, + )] + async fn fulfill( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result { + let fulfilled_at = clock.now(); + let device_code_grant = device_code_grant + .fulfill(browser_session, fulfilled_at) + .map_err(DatabaseError::to_invalid_operation)?; + + let res = sqlx::query!( + r#" + UPDATE oauth2_device_code_grant + SET fulfilled_at = $1 + , user_session_id = $2 + WHERE oauth2_device_code_grant_id = $3 + "#, + fulfilled_at, + Uuid::from(browser_session.id), + Uuid::from(device_code_grant.id), + ) + 
.traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(device_code_grant) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.reject", + skip_all, + fields( + db.query.text, + oauth2_device_code.id = %device_code_grant.id, + oauth2_client.id = %device_code_grant.client_id, + browser_session.id = %browser_session.id, + user.id = %browser_session.user.id, + ), + err, + )] + async fn reject( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result { + let fulfilled_at = clock.now(); + let device_code_grant = device_code_grant + .reject(browser_session, fulfilled_at) + .map_err(DatabaseError::to_invalid_operation)?; + + let res = sqlx::query!( + r#" + UPDATE oauth2_device_code_grant + SET rejected_at = $1 + , user_session_id = $2 + WHERE oauth2_device_code_grant_id = $3 + "#, + fulfilled_at, + Uuid::from(browser_session.id), + Uuid::from(device_code_grant.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(device_code_grant) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.exchange", + skip_all, + fields( + db.query.text, + oauth2_device_code.id = %device_code_grant.id, + oauth2_client.id = %device_code_grant.client_id, + oauth2_session.id = %session.id, + ), + err, + )] + async fn exchange( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + session: &Session, + ) -> Result { + let exchanged_at = clock.now(); + let device_code_grant = device_code_grant + .exchange(session, exchanged_at) + .map_err(DatabaseError::to_invalid_operation)?; + + let res = sqlx::query!( + r#" + UPDATE oauth2_device_code_grant + SET exchanged_at = $1 + , oauth2_session_id = $2 + WHERE oauth2_device_code_grant_id = $3 + "#, + exchanged_at, + Uuid::from(session.id), + Uuid::from(device_code_grant.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + 
+ DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(device_code_grant) + } + + #[tracing::instrument( + name = "db.oauth2_device_code_grant.cleanup", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // `MAX(uuid)` isn't a thing in Postgres, so we can't just re-select the + // deleted rows and do a MAX on the `oauth2_device_code_grant_id`. + // Instead, we do the aggregation on the client side, which is a little + // less efficient, but good enough. + let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT oauth2_device_code_grant_id + FROM oauth2_device_code_grant + WHERE ($1::uuid IS NULL OR oauth2_device_code_grant_id > $1) + AND oauth2_device_code_grant_id <= $2 + ORDER BY oauth2_device_code_grant_id + LIMIT $3 + ) + DELETE FROM oauth2_device_code_grant + USING to_delete + WHERE oauth2_device_code_grant.oauth2_device_code_grant_id = to_delete.oauth2_device_code_grant_id + RETURNING oauth2_device_code_grant.oauth2_device_code_grant_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/mod.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/mod.rs new file mode 100644 index 00000000..bf741b5f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/mod.rs @@ -0,0 +1,947 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
A module containing the PostgreSQL implementations of the OAuth2-related +//! repositories + +mod access_token; +mod authorization_grant; +mod client; +mod device_code_grant; +mod refresh_token; +mod session; + +pub use self::{ + access_token::PgOAuth2AccessTokenRepository, + authorization_grant::PgOAuth2AuthorizationGrantRepository, client::PgOAuth2ClientRepository, + device_code_grant::PgOAuth2DeviceCodeGrantRepository, + refresh_token::PgOAuth2RefreshTokenRepository, session::PgOAuth2SessionRepository, +}; + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{AuthorizationCode, Clock, clock::MockClock}; + use mas_storage::{ + Pagination, + oauth2::{OAuth2DeviceCodeGrantParams, OAuth2SessionFilter, OAuth2SessionRepository}, + }; + use oauth2_types::{ + requests::{GrantType, ResponseMode}, + scope::{EMAIL, OPENID, PROFILE, Scope}, + }; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + use ulid::Ulid; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_repositories(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Lookup a non-existing client + let client = repo.oauth2_client().lookup(Ulid::nil()).await.unwrap(); + assert_eq!(client, None); + + // Find a non-existing client by client id + let client = repo + .oauth2_client() + .find_by_client_id("some-client-id") + .await + .unwrap(); + assert_eq!(client, None); + + // Create a client + let client = repo + .oauth2_client() + .add( + &mut rng, + &clock, + vec!["https://example.com/redirect".parse().unwrap()], + None, + None, + None, + vec![GrantType::AuthorizationCode], + Some("Test client".to_owned()), + Some("https://example.com/logo.png".parse().unwrap()), + Some("https://example.com/".parse().unwrap()), + Some("https://example.com/policy".parse().unwrap()), + 
Some("https://example.com/tos".parse().unwrap()), + Some("https://example.com/jwks.json".parse().unwrap()), + None, + None, + None, + None, + None, + Some("https://example.com/login".parse().unwrap()), + ) + .await + .unwrap(); + + // Lookup the same client by id + let client_lookup = repo + .oauth2_client() + .lookup(client.id) + .await + .unwrap() + .expect("client not found"); + assert_eq!(client, client_lookup); + + // Find the same client by client id + let client_lookup = repo + .oauth2_client() + .find_by_client_id(&client.client_id) + .await + .unwrap() + .expect("client not found"); + assert_eq!(client, client_lookup); + + // Lookup a non-existing grant + let grant = repo + .oauth2_authorization_grant() + .lookup(Ulid::nil()) + .await + .unwrap(); + assert_eq!(grant, None); + + // Find a non-existing grant by code + let grant = repo + .oauth2_authorization_grant() + .find_by_code("code") + .await + .unwrap(); + assert_eq!(grant, None); + + // Create an authorization grant + let grant = repo + .oauth2_authorization_grant() + .add( + &mut rng, + &clock, + &client, + "https://example.com/redirect".parse().unwrap(), + Scope::from_iter([OPENID]), + Some(AuthorizationCode { + code: "code".to_owned(), + pkce: None, + }), + Some("state".to_owned()), + Some("nonce".to_owned()), + ResponseMode::Query, + true, + None, + None, + ) + .await + .unwrap(); + assert!(grant.is_pending()); + + // Lookup the same grant by id + let grant_lookup = repo + .oauth2_authorization_grant() + .lookup(grant.id) + .await + .unwrap() + .expect("grant not found"); + assert_eq!(grant, grant_lookup); + + // Find the same grant by code + let grant_lookup = repo + .oauth2_authorization_grant() + .find_by_code("code") + .await + .unwrap() + .expect("grant not found"); + assert_eq!(grant, grant_lookup); + + // Create a user and a start a user session + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + let user_session = repo + .browser_session() + 
.add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + // Lookup a non-existing session + let session = repo.oauth2_session().lookup(Ulid::nil()).await.unwrap(); + assert_eq!(session, None); + + // Create an OAuth session + let session = repo + .oauth2_session() + .add_from_browser_session( + &mut rng, + &clock, + &client, + &user_session, + grant.scope.clone(), + ) + .await + .unwrap(); + + // Mark the grant as fulfilled + let grant = repo + .oauth2_authorization_grant() + .fulfill(&clock, &session, grant) + .await + .unwrap(); + assert!(grant.is_fulfilled()); + + // Lookup the same session by id + let session_lookup = repo + .oauth2_session() + .lookup(session.id) + .await + .unwrap() + .expect("session not found"); + assert_eq!(session, session_lookup); + + // Mark the grant as exchanged + let grant = repo + .oauth2_authorization_grant() + .exchange(&clock, grant) + .await + .unwrap(); + assert!(grant.is_exchanged()); + + // Lookup a non-existing token + let token = repo + .oauth2_access_token() + .lookup(Ulid::nil()) + .await + .unwrap(); + assert_eq!(token, None); + + // Find a non-existing token + let token = repo + .oauth2_access_token() + .find_by_token("aabbcc") + .await + .unwrap(); + assert_eq!(token, None); + + // Create an access token + let access_token = repo + .oauth2_access_token() + .add( + &mut rng, + &clock, + &session, + "aabbcc".to_owned(), + Some(Duration::try_minutes(5).unwrap()), + ) + .await + .unwrap(); + + // Lookup the same token by id + let access_token_lookup = repo + .oauth2_access_token() + .lookup(access_token.id) + .await + .unwrap() + .expect("token not found"); + assert_eq!(access_token, access_token_lookup); + + // Find the same token by token + let access_token_lookup = repo + .oauth2_access_token() + .find_by_token("aabbcc") + .await + .unwrap() + .expect("token not found"); + assert_eq!(access_token, access_token_lookup); + + // Lookup a non-existing refresh token + let refresh_token = repo + .oauth2_refresh_token() + 
.lookup(Ulid::nil()) + .await + .unwrap(); + assert_eq!(refresh_token, None); + + // Find a non-existing refresh token + let refresh_token = repo + .oauth2_refresh_token() + .find_by_token("aabbcc") + .await + .unwrap(); + assert_eq!(refresh_token, None); + + // Create a refresh token + let refresh_token = repo + .oauth2_refresh_token() + .add( + &mut rng, + &clock, + &session, + &access_token, + "aabbcc".to_owned(), + ) + .await + .unwrap(); + + // Lookup the same refresh token by id + let refresh_token_lookup = repo + .oauth2_refresh_token() + .lookup(refresh_token.id) + .await + .unwrap() + .expect("refresh token not found"); + assert_eq!(refresh_token, refresh_token_lookup); + + // Find the same refresh token by token + let refresh_token_lookup = repo + .oauth2_refresh_token() + .find_by_token("aabbcc") + .await + .unwrap() + .expect("refresh token not found"); + assert_eq!(refresh_token, refresh_token_lookup); + + assert!(access_token.is_valid(clock.now())); + clock.advance(Duration::try_minutes(6).unwrap()); + assert!(!access_token.is_valid(clock.now())); + + // XXX: we might want to create a new access token + clock.advance(Duration::try_minutes(-6).unwrap()); // Go back in time + assert!(access_token.is_valid(clock.now())); + + // Create a new refresh token to be able to consume the old one + let new_refresh_token = repo + .oauth2_refresh_token() + .add( + &mut rng, + &clock, + &session, + &access_token, + "ddeeff".to_owned(), + ) + .await + .unwrap(); + + // Mark the access token as revoked + let access_token = repo + .oauth2_access_token() + .revoke(&clock, access_token) + .await + .unwrap(); + assert!(!access_token.is_valid(clock.now())); + + // Mark the refresh token as consumed + assert!(refresh_token.is_valid()); + let refresh_token = repo + .oauth2_refresh_token() + .consume(&clock, refresh_token, &new_refresh_token) + .await + .unwrap(); + assert!(!refresh_token.is_valid()); + + // Record the user-agent on the session + 
assert!(session.user_agent.is_none()); + let session = repo + .oauth2_session() + .record_user_agent(session, "Mozilla/5.0".to_owned()) + .await + .unwrap(); + assert_eq!(session.user_agent.as_deref(), Some("Mozilla/5.0")); + + // Reload the session and check the user-agent + let session = repo + .oauth2_session() + .lookup(session.id) + .await + .unwrap() + .expect("session not found"); + assert_eq!(session.user_agent.as_deref(), Some("Mozilla/5.0")); + + // Mark the session as finished + assert!(session.is_valid()); + let session = repo.oauth2_session().finish(&clock, session).await.unwrap(); + assert!(!session.is_valid()); + } + + /// Test the [`OAuth2SessionRepository::list`] and + /// [`OAuth2SessionRepository::count`] methods. + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_list_sessions(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create two users and their corresponding browser sessions + let user1 = repo + .user() + .add(&mut rng, &clock, "alice".to_owned()) + .await + .unwrap(); + let user1_session = repo + .browser_session() + .add(&mut rng, &clock, &user1, None) + .await + .unwrap(); + + let user2 = repo + .user() + .add(&mut rng, &clock, "bob".to_owned()) + .await + .unwrap(); + let user2_session = repo + .browser_session() + .add(&mut rng, &clock, &user2, None) + .await + .unwrap(); + + // Create two clients + let client1 = repo + .oauth2_client() + .add( + &mut rng, + &clock, + vec!["https://first.example.com/redirect".parse().unwrap()], + None, + None, + None, + vec![GrantType::AuthorizationCode], + Some("First client".to_owned()), + Some("https://first.example.com/logo.png".parse().unwrap()), + Some("https://first.example.com/".parse().unwrap()), + Some("https://first.example.com/policy".parse().unwrap()), + Some("https://first.example.com/tos".parse().unwrap()), + 
Some("https://first.example.com/jwks.json".parse().unwrap()), + None, + None, + None, + None, + None, + Some("https://first.example.com/login".parse().unwrap()), + ) + .await + .unwrap(); + let client2 = repo + .oauth2_client() + .add( + &mut rng, + &clock, + vec!["https://second.example.com/redirect".parse().unwrap()], + None, + None, + None, + vec![GrantType::AuthorizationCode], + Some("Second client".to_owned()), + Some("https://second.example.com/logo.png".parse().unwrap()), + Some("https://second.example.com/".parse().unwrap()), + Some("https://second.example.com/policy".parse().unwrap()), + Some("https://second.example.com/tos".parse().unwrap()), + Some("https://second.example.com/jwks.json".parse().unwrap()), + None, + None, + None, + None, + None, + Some("https://second.example.com/login".parse().unwrap()), + ) + .await + .unwrap(); + + let scope = Scope::from_iter([OPENID, EMAIL]); + let scope2 = Scope::from_iter([OPENID, PROFILE]); + + // Create two sessions for each user, one with each client + // We're moving the clock forward by 1 minute between each session to ensure + // we're getting consistent ordering in lists. 
+ let session11 = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &clock, &client1, &user1_session, scope.clone()) + .await + .unwrap(); + clock.advance(Duration::try_minutes(1).unwrap()); + + let session12 = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &clock, &client1, &user2_session, scope.clone()) + .await + .unwrap(); + clock.advance(Duration::try_minutes(1).unwrap()); + + let session21 = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &clock, &client2, &user1_session, scope2.clone()) + .await + .unwrap(); + clock.advance(Duration::try_minutes(1).unwrap()); + + let session22 = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &clock, &client2, &user2_session, scope2.clone()) + .await + .unwrap(); + clock.advance(Duration::try_minutes(1).unwrap()); + + // We're also finishing two of the sessions + let session11 = repo + .oauth2_session() + .finish(&clock, session11) + .await + .unwrap(); + let session22 = repo + .oauth2_session() + .finish(&clock, session22) + .await + .unwrap(); + + let pagination = Pagination::first(10); + + // First, list all the sessions + let filter = OAuth2SessionFilter::new().for_any_user(); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 4); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session12); + assert_eq!(list.edges[2].node, session21); + assert_eq!(list.edges[3].node, session22); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 4); + + // Now filter for only one user + let filter = OAuth2SessionFilter::new().for_user(&user1); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 2); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session21); + + 
assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 2); + + // Filter for only one client + let filter = OAuth2SessionFilter::new().for_client(&client1); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 2); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session12); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 2); + + // Filter for both a user and a client + let filter = OAuth2SessionFilter::new() + .for_user(&user2) + .for_client(&client2); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node, session22); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Filter for active sessions + let filter = OAuth2SessionFilter::new().active_only(); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 2); + assert_eq!(list.edges[0].node, session12); + assert_eq!(list.edges[1].node, session21); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 2); + + // Filter for finished sessions + let filter = OAuth2SessionFilter::new().finished_only(); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 2); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session22); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 2); + + // Combine the finished filter with the user filter + let filter = OAuth2SessionFilter::new().finished_only().for_user(&user2); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 1); + 
assert_eq!(list.edges[0].node, session22); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Combine the finished filter with the client filter + let filter = OAuth2SessionFilter::new() + .finished_only() + .for_client(&client2); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node, session22); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Combine the active filter with the user filter + let filter = OAuth2SessionFilter::new().active_only().for_user(&user2); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node, session12); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Combine the active filter with the client filter + let filter = OAuth2SessionFilter::new() + .active_only() + .for_client(&client2); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node, session21); + + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Try the scope filter. 
We should get all sessions with the "openid" scope + let scope = Scope::from_iter([OPENID]); + let filter = OAuth2SessionFilter::new().with_scope(&scope); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 4); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session12); + assert_eq!(list.edges[2].node, session21); + assert_eq!(list.edges[3].node, session22); + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 4); + + // We should get all sessions with the "openid" and "email" scope + let scope = Scope::from_iter([OPENID, EMAIL]); + let filter = OAuth2SessionFilter::new().with_scope(&scope); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert!(!list.has_next_page); + assert_eq!(list.edges.len(), 2); + assert_eq!(list.edges[0].node, session11); + assert_eq!(list.edges[1].node, session12); + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 2); + + // Try combining the scope filter with the user filter + let filter = OAuth2SessionFilter::new() + .with_scope(&scope) + .for_user(&user1); + let list = repo + .oauth2_session() + .list(filter, pagination) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node, session11); + assert_eq!(repo.oauth2_session().count(filter).await.unwrap(), 1); + + // Finish all sessions of a client in batch + let affected = repo + .oauth2_session() + .finish_bulk( + &clock, + OAuth2SessionFilter::new() + .for_client(&client1) + .active_only(), + ) + .await + .unwrap(); + assert_eq!(affected, 1); + + // We should have 3 finished sessions + assert_eq!( + repo.oauth2_session() + .count(OAuth2SessionFilter::new().finished_only()) + .await + .unwrap(), + 3 + ); + + // We should have 1 active sessions + assert_eq!( + repo.oauth2_session() + .count(OAuth2SessionFilter::new().active_only()) + .await + .unwrap(), + 1 + ); 
+ } + + /// Test the [`OAuth2DeviceCodeGrantRepository`] implementation + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_device_code_grant_repository(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Provision a client + let client = repo + .oauth2_client() + .add( + &mut rng, + &clock, + vec!["https://example.com/redirect".parse().unwrap()], + None, + None, + None, + vec![GrantType::AuthorizationCode], + Some("Example".to_owned()), + Some("https://example.com/logo.png".parse().unwrap()), + Some("https://example.com/".parse().unwrap()), + Some("https://example.com/policy".parse().unwrap()), + Some("https://example.com/tos".parse().unwrap()), + Some("https://example.com/jwks.json".parse().unwrap()), + None, + None, + None, + None, + None, + Some("https://example.com/login".parse().unwrap()), + ) + .await + .unwrap(); + + // Provision a user + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + // Provision a browser session + let browser_session = repo + .browser_session() + .add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + let user_code = "usercode"; + let device_code = "devicecode"; + let scope = Scope::from_iter([OPENID, EMAIL]); + + // Create a device code grant + let grant = repo + .oauth2_device_code_grant() + .add( + &mut rng, + &clock, + OAuth2DeviceCodeGrantParams { + client: &client, + scope: scope.clone(), + device_code: device_code.to_owned(), + user_code: user_code.to_owned(), + expires_in: Duration::try_minutes(5).unwrap(), + ip_address: None, + user_agent: None, + }, + ) + .await + .unwrap(); + + assert!(grant.is_pending()); + + // Check that we can find the grant by ID + let id = grant.id; + let lookup = repo.oauth2_device_code_grant().lookup(id).await.unwrap(); + assert_eq!(lookup.as_ref(), Some(&grant)); + + // Check that we can find the grant by device 
code + let lookup = repo + .oauth2_device_code_grant() + .find_by_device_code(device_code) + .await + .unwrap(); + assert_eq!(lookup.as_ref(), Some(&grant)); + + // Check that we can find the grant by user code + let lookup = repo + .oauth2_device_code_grant() + .find_by_user_code(user_code) + .await + .unwrap(); + assert_eq!(lookup.as_ref(), Some(&grant)); + + // Let's mark it as fulfilled + let grant = repo + .oauth2_device_code_grant() + .fulfill(&clock, grant, &browser_session) + .await + .unwrap(); + assert!(!grant.is_pending()); + assert!(grant.is_fulfilled()); + + // Check that we can't mark it as rejected now + let res = repo + .oauth2_device_code_grant() + .reject(&clock, grant, &browser_session) + .await; + assert!(res.is_err()); + + // Look it up again + let grant = repo + .oauth2_device_code_grant() + .lookup(id) + .await + .unwrap() + .unwrap(); + + // We can't mark it as fulfilled again + let res = repo + .oauth2_device_code_grant() + .fulfill(&clock, grant, &browser_session) + .await; + assert!(res.is_err()); + + // Look it up again + let grant = repo + .oauth2_device_code_grant() + .lookup(id) + .await + .unwrap() + .unwrap(); + + // Create an OAuth 2.0 session + let session = repo + .oauth2_session() + .add_from_browser_session(&mut rng, &clock, &client, &browser_session, scope.clone()) + .await + .unwrap(); + + // We can mark it as exchanged + let grant = repo + .oauth2_device_code_grant() + .exchange(&clock, grant, &session) + .await + .unwrap(); + assert!(!grant.is_pending()); + assert!(!grant.is_fulfilled()); + assert!(grant.is_exchanged()); + + // We can't mark it as exchanged again + let res = repo + .oauth2_device_code_grant() + .exchange(&clock, grant, &session) + .await; + assert!(res.is_err()); + + // Do a new grant to reject it + let grant = repo + .oauth2_device_code_grant() + .add( + &mut rng, + &clock, + OAuth2DeviceCodeGrantParams { + client: &client, + scope: scope.clone(), + device_code: "second_devicecode".to_owned(), + user_code: 
"second_usercode".to_owned(), + expires_in: Duration::try_minutes(5).unwrap(), + ip_address: None, + user_agent: None, + }, + ) + .await + .unwrap(); + + let id = grant.id; + + // We can mark it as rejected + let grant = repo + .oauth2_device_code_grant() + .reject(&clock, grant, &browser_session) + .await + .unwrap(); + assert!(!grant.is_pending()); + assert!(grant.is_rejected()); + + // We can't mark it as rejected again + let res = repo + .oauth2_device_code_grant() + .reject(&clock, grant, &browser_session) + .await; + assert!(res.is_err()); + + // Look it up again + let grant = repo + .oauth2_device_code_grant() + .lookup(id) + .await + .unwrap() + .unwrap(); + + // We can't mark it as fulfilled + let res = repo + .oauth2_device_code_grant() + .fulfill(&clock, grant, &browser_session) + .await; + assert!(res.is_err()); + + // Look it up again + let grant = repo + .oauth2_device_code_grant() + .lookup(id) + .await + .unwrap() + .unwrap(); + + // We can't mark it as exchanged + let res = repo + .oauth2_device_code_grant() + .exchange(&clock, grant, &session) + .await; + assert!(res.is_err()); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/refresh_token.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/refresh_token.rs new file mode 100644 index 00000000..cdf328bb --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/refresh_token.rs @@ -0,0 +1,400 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{AccessToken, Clock, RefreshToken, RefreshTokenState, Session}; +use mas_storage::oauth2::OAuth2RefreshTokenRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, tracing::ExecuteExt}; + +/// An implementation of [`OAuth2RefreshTokenRepository`] for a PostgreSQL +/// connection +pub struct PgOAuth2RefreshTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2RefreshTokenRepository<'c> { + /// Create a new [`PgOAuth2RefreshTokenRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct OAuth2RefreshTokenLookup { + oauth2_refresh_token_id: Uuid, + refresh_token: String, + created_at: DateTime, + consumed_at: Option>, + revoked_at: Option>, + oauth2_access_token_id: Option, + oauth2_session_id: Uuid, + next_oauth2_refresh_token_id: Option, +} + +impl TryFrom for RefreshToken { + type Error = DatabaseInconsistencyError; + + fn try_from(value: OAuth2RefreshTokenLookup) -> Result { + let id = value.oauth2_refresh_token_id.into(); + let state = match ( + value.revoked_at, + value.consumed_at, + value.next_oauth2_refresh_token_id, + ) { + (None, None, None) => RefreshTokenState::Valid, + (Some(revoked_at), None, None) => RefreshTokenState::Revoked { revoked_at }, + (None, Some(consumed_at), None) => RefreshTokenState::Consumed { + consumed_at, + next_refresh_token_id: None, + }, + (None, Some(consumed_at), Some(id)) => RefreshTokenState::Consumed { + consumed_at, + next_refresh_token_id: Some(Ulid::from(id)), + }, + _ => { + return Err(DatabaseInconsistencyError::on("oauth2_refresh_tokens") + .column("next_oauth2_refresh_token_id") + .row(id)); + } + }; + + Ok(RefreshToken { + id, + state, + session_id: value.oauth2_session_id.into(), + refresh_token: value.refresh_token, + created_at: 
value.created_at, + access_token_id: value.oauth2_access_token_id.map(Ulid::from), + }) + } +} + +#[async_trait] +impl OAuth2RefreshTokenRepository for PgOAuth2RefreshTokenRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.oauth2_refresh_token.lookup", + skip_all, + fields( + db.query.text, + refresh_token.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2RefreshTokenLookup, + r#" + SELECT oauth2_refresh_token_id + , refresh_token + , created_at + , consumed_at + , revoked_at + , oauth2_access_token_id + , oauth2_session_id + , next_oauth2_refresh_token_id + FROM oauth2_refresh_tokens + + WHERE oauth2_refresh_token_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuth2RefreshTokenLookup, + r#" + SELECT oauth2_refresh_token_id + , refresh_token + , created_at + , consumed_at + , revoked_at + , oauth2_access_token_id + , oauth2_session_id + , next_oauth2_refresh_token_id + FROM oauth2_refresh_tokens + + WHERE refresh_token = $1 + "#, + refresh_token, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.add", + skip_all, + fields( + db.query.text, + %session.id, + client.id = %session.client_id, + refresh_token.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: &AccessToken, + refresh_token: String, + ) -> Result { + let created_at = 
clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("refresh_token.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO oauth2_refresh_tokens + (oauth2_refresh_token_id, oauth2_session_id, oauth2_access_token_id, + refresh_token, created_at) + VALUES + ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(session.id), + Uuid::from(access_token.id), + refresh_token, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(RefreshToken { + id, + state: RefreshTokenState::default(), + session_id: session.id, + refresh_token, + access_token_id: Some(access_token.id), + created_at, + }) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.consume", + skip_all, + fields( + db.query.text, + %refresh_token.id, + session.id = %refresh_token.session_id, + ), + err, + )] + async fn consume( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + replaced_by: &RefreshToken, + ) -> Result { + let consumed_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_refresh_tokens + SET consumed_at = $2, + next_oauth2_refresh_token_id = $3 + WHERE oauth2_refresh_token_id = $1 + "#, + Uuid::from(refresh_token.id), + consumed_at, + Uuid::from(replaced_by.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + refresh_token + .consume(consumed_at, replaced_by) + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.revoke", + skip_all, + fields( + db.query.text, + %refresh_token.id, + session.id = %refresh_token.session_id, + ), + err, + )] + async fn revoke( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + ) -> Result { + let revoked_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_refresh_tokens + SET revoked_at = $2 + WHERE oauth2_refresh_token_id = $1 + "#, + Uuid::from(refresh_token.id), + 
revoked_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + refresh_token + .revoke(revoked_at) + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.cleanup_revoked", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn cleanup_revoked( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT oauth2_refresh_token_id + FROM oauth2_refresh_tokens + WHERE revoked_at IS NOT NULL + AND ($1::timestamptz IS NULL OR revoked_at >= $1::timestamptz) + AND revoked_at < $2::timestamptz + ORDER BY revoked_at ASC + LIMIT $3 + FOR UPDATE + ), + + deleted AS ( + DELETE FROM oauth2_refresh_tokens + USING to_delete + WHERE oauth2_refresh_tokens.oauth2_refresh_token_id = to_delete.oauth2_refresh_token_id + RETURNING oauth2_refresh_tokens.revoked_at + ) + + SELECT + COUNT(*) as "count!", + MAX(revoked_at) as last_revoked_at + FROM deleted + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_revoked_at, + )) + } + + #[tracing::instrument( + name = "db.oauth2_refresh_token.cleanup_consumed", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn cleanup_consumed( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + // We only consider a token as consumed if also the next token has its + // `consumed_at` set. This makes the query a bit expensive to compute, + // but is optimised to two index scans and a nested join using the + // `oauth2_refresh_token_not_consumed_idx` and + // `oauth2_refresh_token_consumed_at_idx` indexes. 
+ let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT rts_to_del.oauth2_refresh_token_id + FROM oauth2_refresh_tokens rts_to_del + LEFT JOIN oauth2_refresh_tokens next_rts + ON rts_to_del.next_oauth2_refresh_token_id = next_rts.oauth2_refresh_token_id + WHERE rts_to_del.consumed_at IS NOT NULL + AND (rts_to_del.next_oauth2_refresh_token_id IS NULL OR next_rts.consumed_at IS NOT NULL) + AND ($1::timestamptz IS NULL OR rts_to_del.consumed_at >= $1::timestamptz) + AND rts_to_del.consumed_at < $2::timestamptz + ORDER BY rts_to_del.consumed_at ASC + LIMIT $3 + ), + + deleted AS ( + DELETE FROM oauth2_refresh_tokens + USING to_delete + WHERE oauth2_refresh_tokens.oauth2_refresh_token_id = to_delete.oauth2_refresh_token_id + RETURNING oauth2_refresh_tokens.consumed_at + ) + + SELECT + COUNT(*) as "count!", + MAX(consumed_at) as last_consumed_at + FROM deleted + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_consumed_at, + )) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/oauth2/session.rs b/matrix-authentication-service/crates/storage-pg/src/oauth2/session.rs new file mode 100644 index 00000000..e1379f74 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/oauth2/session.rs @@ -0,0 +1,708 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Client, Clock, Session, SessionState, User}; +use mas_storage::{ + Page, Pagination, + oauth2::{OAuth2SessionFilter, OAuth2SessionRepository}, + pagination::Node, +}; +use oauth2_types::scope::{Scope, ScopeToken}; +use rand::RngCore; +use sea_query::{ + Condition, Expr, PgFunc, PostgresQueryBuilder, Query, SimpleExpr, enum_def, + extension::postgres::PgExpr, +}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt}, + iden::{OAuth2Clients, OAuth2Sessions, UserSessions}, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`OAuth2SessionRepository`] for a PostgreSQL connection +pub struct PgOAuth2SessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgOAuth2SessionRepository<'c> { + /// Create a new [`PgOAuth2SessionRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct OAuthSessionLookup { + oauth2_session_id: Uuid, + user_id: Option, + user_session_id: Option, + oauth2_client_id: Uuid, + scope_list: Vec, + created_at: DateTime, + finished_at: Option>, + user_agent: Option, + last_active_at: Option>, + last_active_ip: Option, + human_name: Option, +} + +impl Node for OAuthSessionLookup { + fn cursor(&self) -> Ulid { + self.oauth2_session_id.into() + } +} + +impl TryFrom for Session { + type Error = DatabaseInconsistencyError; + + fn try_from(value: OAuthSessionLookup) -> Result { + let id = Ulid::from(value.oauth2_session_id); + let scope: Result = value + .scope_list + .iter() + .map(|s| s.parse::()) + .collect(); + let scope = scope.map_err(|e| { + DatabaseInconsistencyError::on("oauth2_sessions") + .column("scope") + .row(id) + 
.source(e) + })?; + + let state = match value.finished_at { + None => SessionState::Valid, + Some(finished_at) => SessionState::Finished { finished_at }, + }; + + Ok(Session { + id, + state, + created_at: value.created_at, + client_id: value.oauth2_client_id.into(), + user_id: value.user_id.map(Ulid::from), + user_session_id: value.user_session_id.map(Ulid::from), + scope, + user_agent: value.user_agent, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + human_name: value.human_name, + }) + } +} + +impl Filter for OAuth2SessionFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.user().map(|user| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserId)).eq(Uuid::from(user.id)) + })) + .add_option(self.client().map(|client| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2ClientId)) + .eq(Uuid::from(client.id)) + })) + .add_option(self.client_kind().map(|client_kind| { + // This builds either a: + // `WHERE oauth2_client_id = ANY(...)` + // or a `WHERE oauth2_client_id <> ALL(...)` + let static_clients = Query::select() + .expr(Expr::col(( + OAuth2Clients::Table, + OAuth2Clients::OAuth2ClientId, + ))) + .and_where(Expr::col((OAuth2Clients::Table, OAuth2Clients::IsStatic)).into()) + .from(OAuth2Clients::Table) + .take(); + if client_kind.is_static() { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2ClientId)) + .eq(Expr::any(static_clients)) + } else { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2ClientId)) + .ne(Expr::all(static_clients)) + } + })) + .add_option(self.device().map(|device| -> SimpleExpr { + if let Ok([stable_scope_token, unstable_scope_token]) = device.to_scope_token() { + Condition::any() + .add( + Expr::val(stable_scope_token.to_string()).eq(PgFunc::any(Expr::col(( + OAuth2Sessions::Table, + OAuth2Sessions::ScopeList, + )))), + ) + 
.add(Expr::val(unstable_scope_token.to_string()).eq(PgFunc::any( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::ScopeList)), + ))) + .into() + } else { + // If the device ID can't be encoded as a scope token, match no rows + Expr::val(false).into() + } + })) + .add_option(self.browser_session().map(|browser_session| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)) + .eq(Uuid::from(browser_session.id)) + })) + .add_option(self.browser_session_filter().map(|browser_session_filter| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UserSessions::Table, + UserSessions::UserSessionId, + ))) + .apply_filter(browser_session_filter) + .from(UserSessions::Table) + .take(), + ) + })) + .add_option(self.state().map(|state| { + if state.is_active() { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::FinishedAt)).is_null() + } else { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::FinishedAt)).is_not_null() + } + })) + .add_option(self.scope().map(|scope| { + let scope: Vec = scope.iter().map(|s| s.as_str().to_owned()).collect(); + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::ScopeList)).contains(scope) + })) + .add_option(self.any_user().map(|any_user| { + if any_user { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserId)).is_not_null() + } else { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserId)).is_null() + } + })) + .add_option(self.last_active_after().map(|last_active_after| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveAt)) + .gt(last_active_after) + })) + .add_option(self.last_active_before().map(|last_active_before| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveAt)) + .lt(last_active_before) + })) + } +} + +#[async_trait] +impl OAuth2SessionRepository for PgOAuth2SessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.oauth2_session.lookup", + skip_all, + fields( + 
db.query.text, + session.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + OAuthSessionLookup, + r#" + SELECT oauth2_session_id + , user_id + , user_session_id + , oauth2_client_id + , scope_list + , created_at + , finished_at + , user_agent + , last_active_at + , last_active_ip as "last_active_ip: IpAddr" + , human_name + FROM oauth2_sessions + + WHERE oauth2_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(session) = res else { return Ok(None) }; + + Ok(Some(session.try_into()?)) + } + + #[tracing::instrument( + name = "db.oauth2_session.add", + skip_all, + fields( + db.query.text, + %client.id, + session.id, + session.scope = %scope, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + user: Option<&User>, + user_session: Option<&BrowserSession>, + scope: Scope, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("session.id", tracing::field::display(id)); + + let scope_list: Vec = scope.iter().map(|s| s.as_str().to_owned()).collect(); + + sqlx::query!( + r#" + INSERT INTO oauth2_sessions + ( oauth2_session_id + , user_id + , user_session_id + , oauth2_client_id + , scope_list + , created_at + ) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + Uuid::from(id), + user.map(|u| Uuid::from(u.id)), + user_session.map(|s| Uuid::from(s.id)), + Uuid::from(client.id), + &scope_list, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(Session { + id, + state: SessionState::Valid, + created_at, + user_id: user.map(|u| u.id), + user_session_id: user_session.map(|s| s.id), + client_id: client.id, + scope, + user_agent: None, + last_active_at: None, + last_active_ip: None, + human_name: None, + }) + } + + #[tracing::instrument( + name = 
"db.oauth2_session.finish_bulk", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: OAuth2SessionFilter<'_>, + ) -> Result { + let finished_at = clock.now(); + let (sql, arguments) = Query::update() + .table(OAuth2Sessions::Table) + .value(OAuth2Sessions::FinishedAt, finished_at) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let res = sqlx::query_with(&sql, arguments) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.oauth2_session.finish", + skip_all, + fields( + db.query.text, + %session.id, + %session.scope, + client.id = %session.client_id, + ), + err, + )] + async fn finish( + &mut self, + clock: &dyn Clock, + session: Session, + ) -> Result { + let finished_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE oauth2_sessions + SET finished_at = $2 + WHERE oauth2_session_id = $1 + "#, + Uuid::from(session.id), + finished_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + session + .finish(finished_at) + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.oauth2_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: OAuth2SessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2SessionId)), + OAuthSessionLookupIden::Oauth2SessionId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserId)), + OAuthSessionLookupIden::UserId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)), + OAuthSessionLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2ClientId)), + 
OAuthSessionLookupIden::Oauth2ClientId, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::ScopeList)), + OAuthSessionLookupIden::ScopeList, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::CreatedAt)), + OAuthSessionLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::FinishedAt)), + OAuthSessionLookupIden::FinishedAt, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserAgent)), + OAuthSessionLookupIden::UserAgent, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveAt)), + OAuthSessionLookupIden::LastActiveAt, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::LastActiveIp)), + OAuthSessionLookupIden::LastActiveIp, + ) + .expr_as( + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::HumanName)), + OAuthSessionLookupIden::HumanName, + ) + .from(OAuth2Sessions::Table) + .apply_filter(filter) + .generate_pagination( + (OAuth2Sessions::Table, OAuth2Sessions::OAuth2SessionId), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).try_map(Session::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.oauth2_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: OAuth2SessionFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr(Expr::col((OAuth2Sessions::Table, OAuth2Sessions::OAuth2SessionId)).count()) + .from(OAuth2Sessions::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.oauth2_session.record_batch_activity", + skip_all, + fields( + db.query.text, + ), 
+ err, + )] + async fn record_batch_activity( + &mut self, + mut activities: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error> { + // Sort the activity by ID, so that when batching the updates, Postgres + // locks the rows in a stable order, preventing deadlocks + activities.sort_unstable(); + let mut ids = Vec::with_capacity(activities.len()); + let mut last_activities = Vec::with_capacity(activities.len()); + let mut ips = Vec::with_capacity(activities.len()); + + for (id, last_activity, ip) in activities { + ids.push(Uuid::from(id)); + last_activities.push(last_activity); + ips.push(ip); + } + + let res = sqlx::query!( + r#" + UPDATE oauth2_sessions + SET last_active_at = GREATEST(t.last_active_at, oauth2_sessions.last_active_at) + , last_active_ip = COALESCE(t.last_active_ip, oauth2_sessions.last_active_ip) + FROM ( + SELECT * + FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[]) + AS t(oauth2_session_id, last_active_at, last_active_ip) + ) AS t + WHERE oauth2_sessions.oauth2_session_id = t.oauth2_session_id + "#, + &ids, + &last_activities, + &ips as &[Option], + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, ids.len().try_into().unwrap_or(u64::MAX))?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.oauth2_session.record_user_agent", + skip_all, + fields( + db.query.text, + %session.id, + %session.scope, + client.id = %session.client_id, + session.user_agent = user_agent, + ), + err, + )] + async fn record_user_agent( + &mut self, + mut session: Session, + user_agent: String, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE oauth2_sessions + SET user_agent = $2 + WHERE oauth2_session_id = $1 + "#, + Uuid::from(session.id), + &*user_agent, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + session.user_agent = Some(user_agent); + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(session) + } + + #[tracing::instrument( + name = 
"repository.oauth2_session.set_human_name", + skip(self), + fields( + client.id = %session.client_id, + session.human_name = ?human_name, + ), + err, + )] + async fn set_human_name( + &mut self, + mut session: Session, + human_name: Option, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE oauth2_sessions + SET human_name = $2 + WHERE oauth2_session_id = $1 + "#, + Uuid::from(session.id), + human_name.as_deref(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + session.human_name = human_name; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(session) + } + + #[tracing::instrument( + name = "db.oauth2_session.cleanup_finished", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT oauth2_session_id, finished_at + FROM oauth2_sessions + WHERE finished_at IS NOT NULL + AND ($1::timestamptz IS NULL OR finished_at >= $1) + AND finished_at < $2 + ORDER BY finished_at ASC + LIMIT $3 + FOR UPDATE + ), + deleted_refresh_tokens AS ( + DELETE FROM oauth2_refresh_tokens USING to_delete + WHERE oauth2_refresh_tokens.oauth2_session_id = to_delete.oauth2_session_id + ), + deleted_access_tokens AS ( + DELETE FROM oauth2_access_tokens USING to_delete + WHERE oauth2_access_tokens.oauth2_session_id = to_delete.oauth2_session_id + ), + deleted_sessions AS ( + DELETE FROM oauth2_sessions USING to_delete + WHERE oauth2_sessions.oauth2_session_id = to_delete.oauth2_session_id + RETURNING oauth2_sessions.finished_at + ) + SELECT COUNT(*) as "count!", MAX(finished_at) as last_finished_at FROM deleted_sessions + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + 
res.count.try_into().unwrap_or(usize::MAX), + res.last_finished_at, + )) + } + + #[tracing::instrument( + name = "db.oauth2_session.cleanup_inactive_ips", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + threshold = %threshold, + limit = limit, + ), + err, + )] + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH to_update AS ( + SELECT oauth2_session_id, last_active_at + FROM oauth2_sessions + WHERE last_active_ip IS NOT NULL + AND last_active_at IS NOT NULL + AND ($1::timestamptz IS NULL OR last_active_at >= $1) + AND last_active_at < $2 + ORDER BY last_active_at ASC + LIMIT $3 + FOR UPDATE + ), + updated AS ( + UPDATE oauth2_sessions + SET last_active_ip = NULL + FROM to_update + WHERE oauth2_sessions.oauth2_session_id = to_update.oauth2_session_id + RETURNING oauth2_sessions.last_active_at + ) + SELECT COUNT(*) AS "count!", MAX(last_active_at) AS last_active_at FROM updated + "#, + since, + threshold, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_active_at, + )) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/pagination.rs b/matrix-authentication-service/crates/storage-pg/src/pagination.rs new file mode 100644 index 00000000..8e83c237 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/pagination.rs @@ -0,0 +1,65 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Utilities to manage paginated queries. 
+ +use mas_storage::{Pagination, pagination::PaginationDirection}; +use sea_query::IntoColumnRef; +use uuid::Uuid; + +/// An extension trait to the `sqlx` [`QueryBuilder`], to help adding pagination +/// to a query +pub trait QueryBuilderExt { + /// Add cursor-based pagination to a query, as used in paginated GraphQL + /// connections + fn generate_pagination( + &mut self, + column: C, + pagination: Pagination, + ) -> &mut Self; +} + +impl QueryBuilderExt for sea_query::SelectStatement { + fn generate_pagination( + &mut self, + column: C, + pagination: Pagination, + ) -> &mut Self { + let id_field = column.into_column_ref(); + + // ref: https://github.com/graphql/graphql-relay-js/issues/94#issuecomment-232410564 + // 1. Start from the greedy query: SELECT * FROM table + + // 2. If the after argument is provided, add `id > parsed_cursor` to the `WHERE` + // clause + if let Some(after) = pagination.after { + self.and_where(sea_query::Expr::col(id_field.clone()).gt(Uuid::from(after))); + } + + // 3. If the before argument is provided, add `id < parsed_cursor` to the + // `WHERE` clause + if let Some(before) = pagination.before { + self.and_where(sea_query::Expr::col(id_field.clone()).lt(Uuid::from(before))); + } + + match pagination.direction { + // 4. If the first argument is provided, add `ORDER BY id ASC LIMIT first+1` to the + // query + PaginationDirection::Forward => { + self.order_by(id_field, sea_query::Order::Asc) + .limit((pagination.count + 1) as u64); + } + // 5. 
If the first argument is provided, add `ORDER BY id DESC LIMIT last+1` to the + // query + PaginationDirection::Backward => { + self.order_by(id_field, sea_query::Order::Desc) + .limit((pagination.count + 1) as u64); + } + } + + self + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/personal/access_token.rs b/matrix-authentication-service/crates/storage-pg/src/personal/access_token.rs new file mode 100644 index 00000000..db8164fe --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/personal/access_token.rs @@ -0,0 +1,253 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Clock, + personal::{PersonalAccessToken, session::PersonalSession}, +}; +use mas_storage::personal::PersonalAccessTokenRepository; +use rand::RngCore; +use sha2::{Digest, Sha256}; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, tracing::ExecuteExt as _}; + +/// An implementation of [`PersonalAccessTokenRepository`] for a PostgreSQL +/// connection +pub struct PgPersonalAccessTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgPersonalAccessTokenRepository<'c> { + /// Create a new [`PgPersonalAccessTokenRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct PersonalAccessTokenLookup { + personal_access_token_id: Uuid, + personal_session_id: Uuid, + created_at: DateTime, + expires_at: Option>, + revoked_at: Option>, +} + +impl From for PersonalAccessToken { + fn from(value: PersonalAccessTokenLookup) -> Self { + Self { + id: Ulid::from(value.personal_access_token_id), + session_id: Ulid::from(value.personal_session_id), + created_at: value.created_at, + expires_at: value.expires_at, + revoked_at: 
value.revoked_at, + } + } +} + +#[async_trait] +impl PersonalAccessTokenRepository for PgPersonalAccessTokenRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.personal_access_token.lookup", + skip_all, + fields( + db.query.text, + personal_access_token.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + PersonalAccessTokenLookup, + r#" + SELECT personal_access_token_id + , personal_session_id + , created_at + , expires_at + , revoked_at + + FROM personal_access_tokens + + WHERE personal_access_token_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.personal_access_token.find_by_token", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error> { + let token_sha256 = Sha256::digest(access_token.as_bytes()).to_vec(); + + let res = sqlx::query_as!( + PersonalAccessTokenLookup, + r#" + SELECT personal_access_token_id + , personal_session_id + , created_at + , expires_at + , revoked_at + + FROM personal_access_tokens + + WHERE access_token_sha256 = $1 + "#, + &token_sha256, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.personal_access_token.find_active_for_session", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn find_active_for_session( + &mut self, + session: &PersonalSession, + ) -> Result, Self::Error> { + let res: Option = sqlx::query_as!( + PersonalAccessTokenLookup, + r#" + SELECT personal_access_token_id + , personal_session_id + , created_at + , expires_at + , revoked_at + + FROM personal_access_tokens + + WHERE personal_session_id = $1 + AND revoked_at IS NULL 
+ "#, + Uuid::from(session.id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.personal_access_token.add", + skip_all, + fields( + db.query.text, + personal_access_token.id, + %session.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &PersonalSession, + access_token: &str, + expires_after: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("personal_access_token.id", tracing::field::display(id)); + + let token_sha256 = Sha256::digest(access_token.as_bytes()).to_vec(); + + let expires_at = expires_after.map(|expires_after| created_at + expires_after); + + sqlx::query!( + r#" + INSERT INTO personal_access_tokens + (personal_access_token_id, personal_session_id, access_token_sha256, created_at, expires_at) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(session.id), + &token_sha256, + created_at, + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(PersonalAccessToken { + id, + session_id: session.id, + created_at, + expires_at, + revoked_at: None, + }) + } + + #[tracing::instrument( + name = "db.personal_access_token.revoke", + skip_all, + fields( + db.query.text, + %access_token.id, + personal_session.id = %access_token.session_id, + ), + err, + )] + async fn revoke( + &mut self, + clock: &dyn Clock, + mut access_token: PersonalAccessToken, + ) -> Result { + let revoked_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE personal_access_tokens + SET revoked_at = $2 + WHERE personal_access_token_id = $1 + "#, + Uuid::from(access_token.id), + revoked_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + access_token.revoked_at = Some(revoked_at); + 
Ok(access_token) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/personal/mod.rs b/matrix-authentication-service/crates/storage-pg/src/personal/mod.rs new file mode 100644 index 00000000..f540a6be --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/personal/mod.rs @@ -0,0 +1,422 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementations of the +//! Personal Access Token / Personal Session repositories + +mod access_token; +mod session; + +pub use access_token::PgPersonalAccessTokenRepository; +pub use session::PgPersonalSessionRepository; + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{ + Clock, Device, clock::MockClock, personal::session::PersonalSessionOwner, + }; + use mas_storage::{ + Pagination, RepositoryAccess, + personal::{ + PersonalAccessTokenRepository, PersonalSessionFilter, PersonalSessionRepository, + }, + user::UserRepository, + }; + use oauth2_types::scope::{OPENID, PROFILE, Scope}; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_session_repository(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // Create a user + let admin_user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + let bot_user = repo + .user() + .add(&mut rng, &clock, "marvin".to_owned()) + .await + .unwrap(); + + let all = PersonalSessionFilter::new().for_actor_user(&bot_user); + let active = all.active_only(); + let finished = all.finished_only(); + let pagination = Pagination::first(10); + + assert_eq!(repo.personal_session().count(all).await.unwrap(), 0); + 
assert_eq!(repo.personal_session().count(active).await.unwrap(), 0); + assert_eq!(repo.personal_session().count(finished).await.unwrap(), 0); + + // We start off with no sessions + let full_list = repo.personal_session().list(all, pagination).await.unwrap(); + assert!(full_list.edges.is_empty()); + let active_list = repo + .personal_session() + .list(active, pagination) + .await + .unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo + .personal_session() + .list(finished, pagination) + .await + .unwrap(); + assert!(finished_list.edges.is_empty()); + + // Start a personal session for that user + let device = Device::generate(&mut rng); + let scope: Scope = [OPENID, PROFILE] + .into_iter() + .chain(device.to_scope_token().unwrap()) + .collect(); + let session = repo + .personal_session() + .add( + &mut rng, + &clock, + (&admin_user).into(), + &bot_user, + "Test Personal Session".to_owned(), + scope.clone(), + ) + .await + .unwrap(); + assert_eq!(session.owner, PersonalSessionOwner::User(admin_user.id)); + assert_eq!(session.actor_user_id, bot_user.id); + assert!(session.is_valid()); + assert!(!session.is_revoked()); + assert_eq!(session.scope, scope); + + assert_eq!(repo.personal_session().count(all).await.unwrap(), 1); + assert_eq!(repo.personal_session().count(active).await.unwrap(), 1); + assert_eq!(repo.personal_session().count(finished).await.unwrap(), 0); + + let full_list = repo.personal_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!(full_list.edges[0].node.0.id, session.id); + assert!(full_list.edges[0].node.0.is_valid()); + let active_list = repo + .personal_session() + .list(active, pagination) + .await + .unwrap(); + assert_eq!(active_list.edges.len(), 1); + assert_eq!(active_list.edges[0].node.0.id, session.id); + assert!(active_list.edges[0].node.0.is_valid()); + let finished_list = repo + .personal_session() + .list(finished, pagination) + .await + .unwrap(); + 
assert!(finished_list.edges.is_empty()); + + // Lookup the session and check it didn't change + let session_lookup = repo + .personal_session() + .lookup(session.id) + .await + .unwrap() + .expect("personal session not found"); + assert_eq!(session_lookup.id, session.id); + assert_eq!( + session_lookup.owner, + PersonalSessionOwner::User(admin_user.id) + ); + assert_eq!(session_lookup.actor_user_id, bot_user.id); + assert_eq!(session_lookup.scope, scope); + assert!(session_lookup.is_valid()); + assert!(!session_lookup.is_revoked()); + + // Revoke the session + let session = repo + .personal_session() + .revoke(&clock, session) + .await + .unwrap(); + assert!(!session.is_valid()); + assert!(session.is_revoked()); + + assert_eq!(repo.personal_session().count(all).await.unwrap(), 1); + assert_eq!(repo.personal_session().count(active).await.unwrap(), 0); + assert_eq!(repo.personal_session().count(finished).await.unwrap(), 1); + + let full_list = repo.personal_session().list(all, pagination).await.unwrap(); + assert_eq!(full_list.edges.len(), 1); + assert_eq!(full_list.edges[0].node.0.id, session.id); + let active_list = repo + .personal_session() + .list(active, pagination) + .await + .unwrap(); + assert!(active_list.edges.is_empty()); + let finished_list = repo + .personal_session() + .list(finished, pagination) + .await + .unwrap(); + assert_eq!(finished_list.edges.len(), 1); + assert_eq!(finished_list.edges[0].node.0.id, session.id); + assert!(finished_list.edges[0].node.0.is_revoked()); + + // Reload the session and check again + let session_lookup = repo + .personal_session() + .lookup(session.id) + .await + .unwrap() + .expect("personal session not found"); + assert!(!session_lookup.is_valid()); + assert!(session_lookup.is_revoked()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_session_revoke_bulk(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = 
PgRepository::from_pool(&pool).await.unwrap(); + + let alice_user = repo + .user() + .add(&mut rng, &clock, "alice".to_owned()) + .await + .unwrap(); + let bob_user = repo + .user() + .add(&mut rng, &clock, "bob".to_owned()) + .await + .unwrap(); + + let session1 = repo + .personal_session() + .add( + &mut rng, + &clock, + (&alice_user).into(), + &bob_user, + "Test Personal Session".to_owned(), + "openid".parse().unwrap(), + ) + .await + .unwrap(); + repo.personal_access_token() + .add( + &mut rng, + &clock, + &session1, + "mpt_hiss", + Some(Duration::days(42)), + ) + .await + .unwrap(); + + let session2 = repo + .personal_session() + .add( + &mut rng, + &clock, + (&bob_user).into(), + &bob_user, + "Test Personal Session".to_owned(), + "openid".parse().unwrap(), + ) + .await + .unwrap(); + repo.personal_access_token() + .add( + &mut rng, &clock, &session2, "mpt_meow", // No expiry + None, + ) + .await + .unwrap(); + + // Just one session without a token expiry time + assert_eq!( + repo.personal_session() + .revoke_bulk( + &clock, + PersonalSessionFilter::new() + .active_only() + .with_expires(false) + ) + .await + .unwrap(), + 1 + ); + + // Just one session with a token expiry time + assert_eq!( + repo.personal_session() + .revoke_bulk( + &clock, + PersonalSessionFilter::new() + .active_only() + .with_expires(true) + ) + .await + .unwrap(), + 1 + ); + + // No active sessions left + assert_eq!( + repo.personal_session() + .revoke_bulk(&clock, PersonalSessionFilter::new().active_only()) + .await + .unwrap(), + 0 + ); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_access_token_repository(pool: PgPool) { + const FIRST_TOKEN: &str = "first_access_token"; + const SECOND_TOKEN: &str = "second_access_token"; + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a user + let admin_user = repo + .user() + .add(&mut rng, &clock, 
"john".to_owned()) + .await + .unwrap(); + let bot_user = repo + .user() + .add(&mut rng, &clock, "marvin".to_owned()) + .await + .unwrap(); + + // Start a personal session for that user + let device = Device::generate(&mut rng); + let scope: Scope = [OPENID, PROFILE] + .into_iter() + .chain(device.to_scope_token().unwrap()) + .collect(); + let session = repo + .personal_session() + .add( + &mut rng, + &clock, + (&admin_user).into(), + &bot_user, + "Test Personal Session".to_owned(), + scope, + ) + .await + .unwrap(); + + // Add an access token to that session + let token = repo + .personal_access_token() + .add( + &mut rng, + &clock, + &session, + FIRST_TOKEN, + Some(Duration::try_minutes(1).unwrap()), + ) + .await + .unwrap(); + assert_eq!(token.session_id, session.id); + + // Commit the txn and grab a new transaction, to test a conflict + repo.save().await.unwrap(); + + { + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + // Adding the same token a second time should conflict + assert!( + repo.personal_access_token() + .add( + &mut rng, + &clock, + &session, + FIRST_TOKEN, + Some(Duration::try_minutes(1).unwrap()), + ) + .await + .is_err() + ); + repo.cancel().await.unwrap(); + } + + // Grab a new repo + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Looking up via ID works + let token_lookup = repo + .personal_access_token() + .lookup(token.id) + .await + .unwrap() + .expect("personal access token not found"); + assert_eq!(token.id, token_lookup.id); + assert_eq!(token_lookup.session_id, session.id); + + // Looking up via the token value works + let token_lookup = repo + .personal_access_token() + .find_by_token(FIRST_TOKEN) + .await + .unwrap() + .expect("personal access token not found"); + assert_eq!(token.id, token_lookup.id); + assert_eq!(token_lookup.session_id, session.id); + + // Token is currently valid + assert!(token.is_valid(clock.now())); + + clock.advance(Duration::try_minutes(1).unwrap()); + // 
Token should have expired + assert!(!token.is_valid(clock.now())); + + // Add a second access token, this time without expiration + let _token = repo + .personal_access_token() + .revoke(&clock, token) + .await + .unwrap(); + let token = repo + .personal_access_token() + .add(&mut rng, &clock, &session, SECOND_TOKEN, None) + .await + .unwrap(); + assert_eq!(token.session_id, session.id); + + // Token is currently valid + assert!(token.is_valid(clock.now())); + + // Revoke it + let _token = repo + .personal_access_token() + .revoke(&clock, token) + .await + .unwrap(); + + // Reload it + let token = repo + .personal_access_token() + .find_by_token(SECOND_TOKEN) + .await + .unwrap() + .expect("personal access token not found"); + + // Token is not valid anymore + assert!(!token.is_valid(clock.now())); + + repo.save().await.unwrap(); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/personal/session.rs b/matrix-authentication-service/crates/storage-pg/src/personal/session.rs new file mode 100644 index 00000000..b4c330ec --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/personal/session.rs @@ -0,0 +1,702 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Clock, User, + personal::{ + PersonalAccessToken, + session::{PersonalSession, PersonalSessionOwner, SessionState}, + }, +}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + personal::{PersonalSessionFilter, PersonalSessionRepository, PersonalSessionState}, +}; +use oauth2_types::scope::Scope; +use opentelemetry_semantic_conventions::trace::DB_QUERY_TEXT; +use rand::RngCore; +use sea_query::{ + Cond, Condition, Expr, PgFunc, PostgresQueryBuilder, Query, SimpleExpr, enum_def, + extension::postgres::PgExpr as _, +}; +use sea_query_binder::SqlxBinder as _; +use sqlx::PgConnection; +use tracing::{Instrument as _, info_span}; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, + errors::DatabaseInconsistencyError, + filter::{Filter, StatementExt as _}, + iden::{PersonalAccessTokens, PersonalSessions}, + pagination::QueryBuilderExt as _, + tracing::ExecuteExt as _, +}; + +/// An implementation of [`PersonalSessionRepository`] for a PostgreSQL +/// connection +pub struct PgPersonalSessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgPersonalSessionRepository<'c> { + /// Create a new [`PgPersonalSessionRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct PersonalSessionLookup { + personal_session_id: Uuid, + owner_user_id: Option, + owner_oauth2_client_id: Option, + actor_user_id: Uuid, + human_name: String, + scope_list: Vec, + created_at: DateTime, + revoked_at: Option>, + last_active_at: Option>, + last_active_ip: Option, +} + +impl Node for PersonalSessionLookup { + fn cursor(&self) -> Ulid { + self.personal_session_id.into() + } +} + +impl TryFrom for PersonalSession { + type Error = DatabaseInconsistencyError; + + fn try_from(value: PersonalSessionLookup) -> Result { + let id = 
Ulid::from(value.personal_session_id); + let scope: Result = value.scope_list.iter().map(|s| s.parse()).collect(); + let scope = scope.map_err(|e| { + DatabaseInconsistencyError::on("personal_sessions") + .column("scope") + .row(id) + .source(e) + })?; + + let state = match value.revoked_at { + None => SessionState::Valid, + Some(revoked_at) => SessionState::Revoked { revoked_at }, + }; + + let owner = match (value.owner_user_id, value.owner_oauth2_client_id) { + (Some(owner_user_id), None) => PersonalSessionOwner::User(Ulid::from(owner_user_id)), + (None, Some(owner_oauth2_client_id)) => { + PersonalSessionOwner::OAuth2Client(Ulid::from(owner_oauth2_client_id)) + } + _ => { + // should be impossible (CHECK constraint in Postgres prevents it) + return Err(DatabaseInconsistencyError::on("personal_sessions") + .column("owner_user_id, owner_oauth2_client_id") + .row(id)); + } + }; + + Ok(PersonalSession { + id, + state, + owner, + actor_user_id: Ulid::from(value.actor_user_id), + human_name: value.human_name, + scope, + created_at: value.created_at, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + }) + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct PersonalSessionAndAccessTokenLookup { + personal_session_id: Uuid, + owner_user_id: Option, + owner_oauth2_client_id: Option, + actor_user_id: Uuid, + human_name: String, + scope_list: Vec, + created_at: DateTime, + revoked_at: Option>, + last_active_at: Option>, + last_active_ip: Option, + + // tokens + personal_access_token_id: Option, + token_created_at: Option>, + token_expires_at: Option>, +} + +impl Node for PersonalSessionAndAccessTokenLookup { + fn cursor(&self) -> Ulid { + self.personal_session_id.into() + } +} + +impl TryFrom + for (PersonalSession, Option) +{ + type Error = DatabaseInconsistencyError; + + fn try_from(value: PersonalSessionAndAccessTokenLookup) -> Result { + let session = PersonalSession::try_from(PersonalSessionLookup { + personal_session_id: 
value.personal_session_id, + owner_user_id: value.owner_user_id, + owner_oauth2_client_id: value.owner_oauth2_client_id, + actor_user_id: value.actor_user_id, + human_name: value.human_name, + scope_list: value.scope_list, + created_at: value.created_at, + revoked_at: value.revoked_at, + last_active_at: value.last_active_at, + last_active_ip: value.last_active_ip, + })?; + + let token_opt = if let Some(id) = value.personal_access_token_id { + let id = Ulid::from(id); + Some(PersonalAccessToken { + id, + session_id: session.id, + // should not be possible + created_at: value.token_created_at.ok_or( + DatabaseInconsistencyError::on("personal_sessions") + .column("created_at") + .row(id), + )?, + expires_at: value.token_expires_at, + revoked_at: None, + }) + } else { + None + }; + + Ok((session, token_opt)) + } +} + +#[async_trait] +impl PersonalSessionRepository for PgPersonalSessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.personal_session.lookup", + skip_all, + fields( + db.query.text, + session.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + PersonalSessionLookup, + r#" + SELECT personal_session_id + , owner_user_id + , owner_oauth2_client_id + , actor_user_id + , scope_list + , created_at + , revoked_at + , human_name + , last_active_at + , last_active_ip as "last_active_ip: IpAddr" + FROM personal_sessions + + WHERE personal_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(session) = res else { return Ok(None) }; + + Ok(Some(session.try_into()?)) + } + + #[tracing::instrument( + name = "db.personal_session.add", + skip_all, + fields( + db.query.text, + session.id, + session.scope = %scope, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + owner: PersonalSessionOwner, + actor_user: &User, + human_name: String, + scope: Scope, + 
) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("session.id", tracing::field::display(id)); + + let scope_list: Vec = scope.iter().map(|s| s.as_str().to_owned()).collect(); + + let (owner_user_id, owner_oauth2_client_id) = match owner { + PersonalSessionOwner::User(ulid) => (Some(Uuid::from(ulid)), None), + PersonalSessionOwner::OAuth2Client(ulid) => (None, Some(Uuid::from(ulid))), + }; + + sqlx::query!( + r#" + INSERT INTO personal_sessions + ( personal_session_id + , owner_user_id + , owner_oauth2_client_id + , actor_user_id + , human_name + , scope_list + , created_at + ) + VALUES ($1, $2, $3, $4, $5, $6, $7) + "#, + Uuid::from(id), + owner_user_id, + owner_oauth2_client_id, + Uuid::from(actor_user.id), + &human_name, + &scope_list, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(PersonalSession { + id, + state: SessionState::Valid, + owner, + actor_user_id: actor_user.id, + human_name, + scope, + created_at, + last_active_at: None, + last_active_ip: None, + }) + } + + #[tracing::instrument( + name = "db.personal_session.revoke", + skip_all, + fields( + db.query.text, + %session.id, + %session.scope, + ), + err, + )] + async fn revoke( + &mut self, + clock: &dyn Clock, + session: PersonalSession, + ) -> Result { + let revoked_at = clock.now(); + + { + // Revoke dependent PATs + let span = info_span!( + "db.personal_session.revoke.tokens", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + sqlx::query!( + r#" + UPDATE personal_access_tokens + SET revoked_at = $2 + WHERE personal_session_id = $1 AND revoked_at IS NULL + "#, + Uuid::from(session.id), + revoked_at, + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + let res = sqlx::query!( + r#" + UPDATE personal_sessions + SET revoked_at = $2 + WHERE personal_session_id = $1 + "#, + Uuid::from(session.id), + revoked_at, + ) + .traced() + .execute(&mut 
*self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + session + .finish(revoked_at) + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.personal_session.revoke_bulk", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn revoke_bulk( + &mut self, + clock: &dyn Clock, + filter: PersonalSessionFilter<'_>, + ) -> Result { + let revoked_at = clock.now(); + + let (sql, arguments) = Query::update() + .table(PersonalSessions::Table) + .value(PersonalSessions::RevokedAt, revoked_at) + .and_where( + Expr::col((PersonalSessions::Table, PersonalSessions::PersonalSessionId)) + // Because filters apply to both the session and access token tables, + // Use a subquery to make it possible to use a JOIN + // onto the personal access token table. + .in_subquery( + Query::select() + .expr(Expr::col(( + PersonalSessions::Table, + PersonalSessions::PersonalSessionId, + ))) + .from(PersonalSessions::Table) + .left_join( + PersonalAccessTokens::Table, + Cond::all() + // Match session ID + .add( + Expr::col(( + PersonalSessions::Table, + PersonalSessions::PersonalSessionId, + )) + .eq(Expr::col(( + PersonalAccessTokens::Table, + PersonalAccessTokens::PersonalSessionId, + ))), + ) + // Only choose the active access token for each session + .add( + Expr::col(( + PersonalAccessTokens::Table, + PersonalAccessTokens::RevokedAt, + )) + .is_null(), + ), + ) + .apply_filter(filter) + .take(), + ), + ) + .build_sqlx(PostgresQueryBuilder); + + let res = sqlx::query_with(&sql, arguments) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.personal_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: PersonalSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + 
Expr::col((PersonalSessions::Table, PersonalSessions::PersonalSessionId)), + PersonalSessionAndAccessTokenLookupIden::PersonalSessionId, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::OwnerUserId)), + PersonalSessionAndAccessTokenLookupIden::OwnerUserId, + ) + .expr_as( + Expr::col(( + PersonalSessions::Table, + PersonalSessions::OwnerOAuth2ClientId, + )), + PersonalSessionAndAccessTokenLookupIden::OwnerOauth2ClientId, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::ActorUserId)), + PersonalSessionAndAccessTokenLookupIden::ActorUserId, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::HumanName)), + PersonalSessionAndAccessTokenLookupIden::HumanName, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::ScopeList)), + PersonalSessionAndAccessTokenLookupIden::ScopeList, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::CreatedAt)), + PersonalSessionAndAccessTokenLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::RevokedAt)), + PersonalSessionAndAccessTokenLookupIden::RevokedAt, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::LastActiveAt)), + PersonalSessionAndAccessTokenLookupIden::LastActiveAt, + ) + .expr_as( + Expr::col((PersonalSessions::Table, PersonalSessions::LastActiveIp)), + PersonalSessionAndAccessTokenLookupIden::LastActiveIp, + ) + .expr_as( + Expr::col(( + PersonalAccessTokens::Table, + PersonalAccessTokens::PersonalAccessTokenId, + )), + PersonalSessionAndAccessTokenLookupIden::PersonalAccessTokenId, + ) + .expr_as( + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::CreatedAt)), + PersonalSessionAndAccessTokenLookupIden::TokenCreatedAt, + ) + .expr_as( + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::ExpiresAt)), + PersonalSessionAndAccessTokenLookupIden::TokenExpiresAt, + ) + .from(PersonalSessions::Table) + .left_join( + 
PersonalAccessTokens::Table, + Cond::all() + // Match session ID + .add( + Expr::col((PersonalSessions::Table, PersonalSessions::PersonalSessionId)) + .eq(Expr::col(( + PersonalAccessTokens::Table, + PersonalAccessTokens::PersonalSessionId, + ))), + ) + // Only choose the active access token for each session + .add( + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::RevokedAt)) + .is_null(), + ), + ) + .apply_filter(filter) + .generate_pagination( + (PersonalSessions::Table, PersonalSessions::PersonalSessionId), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).try_map(TryFrom::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.personal_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: PersonalSessionFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr(Expr::col((PersonalSessions::Table, PersonalSessions::PersonalSessionId)).count()) + .from(PersonalSessions::Table) + .left_join( + PersonalAccessTokens::Table, + Cond::all() + // Match session ID + .add( + Expr::col((PersonalSessions::Table, PersonalSessions::PersonalSessionId)) + .eq(Expr::col(( + PersonalAccessTokens::Table, + PersonalAccessTokens::PersonalSessionId, + ))), + ) + // Only choose the active access token for each session + .add( + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::RevokedAt)) + .is_null(), + ), + ) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.personal_session.record_batch_activity", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn 
record_batch_activity( + &mut self, + mut activities: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error> { + // Sort the activity by ID, so that when batching the updates, Postgres + // locks the rows in a stable order, preventing deadlocks + activities.sort_unstable(); + let mut ids = Vec::with_capacity(activities.len()); + let mut last_activities = Vec::with_capacity(activities.len()); + let mut ips = Vec::with_capacity(activities.len()); + + for (id, last_activity, ip) in activities { + ids.push(Uuid::from(id)); + last_activities.push(last_activity); + ips.push(ip); + } + + let res = sqlx::query!( + r#" + UPDATE personal_sessions + SET last_active_at = GREATEST(t.last_active_at, personal_sessions.last_active_at) + , last_active_ip = COALESCE(t.last_active_ip, personal_sessions.last_active_ip) + FROM ( + SELECT * + FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[]) + AS t(personal_session_id, last_active_at, last_active_ip) + ) AS t + WHERE personal_sessions.personal_session_id = t.personal_session_id + "#, + &ids, + &last_activities, + &ips as &[Option], + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, ids.len().try_into().unwrap_or(u64::MAX))?; + + Ok(()) + } +} + +impl Filter for PersonalSessionFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.owner_user().map(|user| { + Expr::col((PersonalSessions::Table, PersonalSessions::OwnerUserId)) + .eq(Uuid::from(user.id)) + })) + .add_option(self.owner_oauth2_client().map(|client| { + Expr::col(( + PersonalSessions::Table, + PersonalSessions::OwnerOAuth2ClientId, + )) + .eq(Uuid::from(client.id)) + })) + .add_option(self.actor_user().map(|user| { + Expr::col((PersonalSessions::Table, PersonalSessions::ActorUserId)) + .eq(Uuid::from(user.id)) + })) + .add_option(self.device().map(|device| -> SimpleExpr { + if let Ok([stable_scope_token, unstable_scope_token]) 
= device.to_scope_token() { + Condition::any() + .add( + Expr::val(stable_scope_token.to_string()).eq(PgFunc::any(Expr::col(( + PersonalSessions::Table, + PersonalSessions::ScopeList, + )))), + ) + .add(Expr::val(unstable_scope_token.to_string()).eq(PgFunc::any( + Expr::col((PersonalSessions::Table, PersonalSessions::ScopeList)), + ))) + .into() + } else { + // If the device ID can't be encoded as a scope token, match no rows + Expr::val(false).into() + } + })) + .add_option(self.state().map(|state| match state { + PersonalSessionState::Active => { + Expr::col((PersonalSessions::Table, PersonalSessions::RevokedAt)).is_null() + } + PersonalSessionState::Revoked => { + Expr::col((PersonalSessions::Table, PersonalSessions::RevokedAt)).is_not_null() + } + })) + .add_option(self.scope().map(|scope| { + let scope: Vec = scope.iter().map(|s| s.as_str().to_owned()).collect(); + Expr::col((PersonalSessions::Table, PersonalSessions::ScopeList)).contains(scope) + })) + .add_option(self.last_active_before().map(|last_active_before| { + Expr::col((PersonalSessions::Table, PersonalSessions::LastActiveAt)) + .lt(last_active_before) + })) + .add_option(self.last_active_after().map(|last_active_after| { + Expr::col((PersonalSessions::Table, PersonalSessions::LastActiveAt)) + .gt(last_active_after) + })) + .add_option(self.expires_before().map(|expires_before| { + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::ExpiresAt)) + .lt(expires_before) + })) + .add_option(self.expires_after().map(|expires_after| { + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::ExpiresAt)) + .gt(expires_after) + })) + .add_option(self.expires().map(|expires| { + let column = + Expr::col((PersonalAccessTokens::Table, PersonalAccessTokens::ExpiresAt)); + + if expires { + column.is_not_null() + } else { + column.is_null() + } + })) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/policy_data.rs 
b/matrix-authentication-service/crates/storage-pg/src/policy_data.rs new file mode 100644 index 00000000..3bcb34d6 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/policy_data.rs @@ -0,0 +1,205 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the policy data +//! storage. + +use async_trait::async_trait; +use mas_data_model::{Clock, PolicyData}; +use mas_storage::policy_data::PolicyDataRepository; +use rand::RngCore; +use serde_json::Value; +use sqlx::{PgConnection, types::Json}; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, ExecuteExt}; + +/// An implementation of [`PolicyDataRepository`] for a PostgreSQL connection. +pub struct PgPolicyDataRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgPolicyDataRepository<'c> { + /// Create a new [`PgPolicyDataRepository`] from an active PostgreSQL + /// connection. 
+ #[must_use] + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct PolicyDataLookup { + policy_data_id: Uuid, + created_at: chrono::DateTime, + data: Json, +} + +impl From for PolicyData { + fn from(value: PolicyDataLookup) -> Self { + PolicyData { + id: value.policy_data_id.into(), + created_at: value.created_at, + data: value.data.0, + } + } +} + +#[async_trait] +impl PolicyDataRepository for PgPolicyDataRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.policy_data.get", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn get(&mut self) -> Result, Self::Error> { + let row = sqlx::query_as!( + PolicyDataLookup, + r#" + SELECT policy_data_id, created_at, data + FROM policy_data + ORDER BY policy_data_id DESC + LIMIT 1 + "# + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(row) = row else { + return Ok(None); + }; + + Ok(Some(row.into())) + } + + #[tracing::instrument( + name = "db.policy_data.set", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn set( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + data: Value, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + + sqlx::query!( + r#" + INSERT INTO policy_data (policy_data_id, created_at, data) + VALUES ($1, $2, $3) + "#, + Uuid::from(id), + created_at, + data, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(PolicyData { + id, + created_at, + data, + }) + } + + #[tracing::instrument( + name = "db.policy_data.prune", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn prune(&mut self, keep: usize) -> Result { + let res = sqlx::query!( + r#" + DELETE FROM policy_data + WHERE policy_data_id IN ( + SELECT policy_data_id + FROM policy_data + ORDER BY policy_data_id DESC + OFFSET $1 + ) + "#, + i64::try_from(keep).map_err(DatabaseError::to_invalid_operation)? 
+ ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res + .rows_affected() + .try_into() + .map_err(DatabaseError::to_invalid_operation)?) + } +} + +#[cfg(test)] +mod tests { + use mas_data_model::clock::MockClock; + use mas_storage::policy_data::PolicyDataRepository; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use serde_json::json; + use sqlx::PgPool; + + use crate::policy_data::PgPolicyDataRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_policy_data(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut conn = pool.acquire().await.unwrap(); + let mut repo = PgPolicyDataRepository::new(&mut conn); + + // Get an empty state at first + let data = repo.get().await.unwrap(); + assert_eq!(data, None); + + // Set some data + let value1 = json!({"hello": "world"}); + let policy_data1 = repo.set(&mut rng, &clock, value1.clone()).await.unwrap(); + assert_eq!(policy_data1.data, value1); + + let data_fetched1 = repo.get().await.unwrap().unwrap(); + assert_eq!(policy_data1, data_fetched1); + + // Set some new data + clock.advance(chrono::Duration::seconds(1)); + let value2 = json!({"foo": "bar"}); + let policy_data2 = repo.set(&mut rng, &clock, value2.clone()).await.unwrap(); + assert_eq!(policy_data2.data, value2); + + // Check the new data is fetched + let data_fetched2 = repo.get().await.unwrap().unwrap(); + assert_eq!(data_fetched2, policy_data2); + + // Prune until the first entry + let affected = repo.prune(1).await.unwrap(); + let data_fetched3 = repo.get().await.unwrap().unwrap(); + assert_eq!(data_fetched3, policy_data2); + assert_eq!(affected, 1); + + // Do a raw query to check the other rows were pruned + let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM policy_data") + .fetch_one(&mut *conn) + .await + .unwrap(); + assert_eq!(count, 1); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/queue/job.rs 
b/matrix-authentication-service/crates/storage-pg/src/queue/job.rs new file mode 100644 index 00000000..f3d1da69 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/queue/job.rs @@ -0,0 +1,498 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the +//! [`QueueJobRepository`]. + +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::Clock; +use mas_storage::queue::{Job, QueueJobRepository, Worker}; +use opentelemetry_semantic_conventions::trace::DB_QUERY_TEXT; +use rand::RngCore; +use sqlx::PgConnection; +use tracing::Instrument; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, ExecuteExt}; + +/// An implementation of [`QueueJobRepository`] for a PostgreSQL connection. +pub struct PgQueueJobRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgQueueJobRepository<'c> { + /// Create a new [`PgQueueJobRepository`] from an active PostgreSQL + /// connection. 
+ #[must_use] + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct JobReservationResult { + queue_job_id: Uuid, + queue_name: String, + payload: serde_json::Value, + metadata: serde_json::Value, + attempt: i32, +} + +impl TryFrom for Job { + type Error = DatabaseInconsistencyError; + + fn try_from(value: JobReservationResult) -> Result { + let id = value.queue_job_id.into(); + let queue_name = value.queue_name; + let payload = value.payload; + + let metadata = serde_json::from_value(value.metadata).map_err(|e| { + DatabaseInconsistencyError::on("queue_jobs") + .column("metadata") + .row(id) + .source(e) + })?; + + let attempt = value.attempt.try_into().map_err(|e| { + DatabaseInconsistencyError::on("queue_jobs") + .column("attempt") + .row(id) + .source(e) + })?; + + Ok(Self { + id, + queue_name, + payload, + metadata, + attempt, + }) + } +} + +#[async_trait] +impl QueueJobRepository for PgQueueJobRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.queue_job.schedule", + fields( + queue_job.id, + queue_job.queue_name = queue_name, + db.query.text, + ), + skip_all, + err, + )] + async fn schedule( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + ) -> Result<(), Self::Error> { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("queue_job.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO queue_jobs + (queue_job_id, queue_name, payload, metadata, created_at) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + queue_name, + payload, + metadata, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_job.schedule_later", + fields( + queue_job.id, + queue_job.queue_name = queue_name, + queue_job.scheduled_at = %scheduled_at, + 
db.query.text, + ), + skip_all, + err, + )] + async fn schedule_later( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + scheduled_at: DateTime, + schedule_name: Option<&str>, + ) -> Result<(), Self::Error> { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("queue_job.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO queue_jobs + (queue_job_id, queue_name, payload, metadata, created_at, scheduled_at, schedule_name, status) + VALUES ($1, $2, $3, $4, $5, $6, $7, 'scheduled') + "#, + Uuid::from(id), + queue_name, + payload, + metadata, + created_at, + scheduled_at, + schedule_name, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // If there was a schedule name supplied, update the queue_schedules table + if let Some(schedule_name) = schedule_name { + let span = tracing::info_span!( + "db.queue_job.schedule_later.update_schedules", + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + + let res = sqlx::query!( + r#" + UPDATE queue_schedules + SET last_scheduled_at = $1, + last_scheduled_job_id = $2 + WHERE schedule_name = $3 + "#, + scheduled_at, + Uuid::from(id), + schedule_name, + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + } + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_job.reserve", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn reserve( + &mut self, + clock: &dyn Clock, + worker: &Worker, + queues: &[&str], + count: usize, + ) -> Result, Self::Error> { + let now = clock.now(); + let max_count = i64::try_from(count).unwrap_or(i64::MAX); + let queues: Vec = queues.iter().map(|&s| s.to_owned()).collect(); + let results = sqlx::query_as!( + JobReservationResult, + r#" + -- We first grab a few jobs that are available, + -- using a FOR 
UPDATE SKIP LOCKED so that this can be run concurrently + -- and we don't get multiple workers grabbing the same jobs + WITH locked_jobs AS ( + SELECT queue_job_id + FROM queue_jobs + WHERE + status = 'available' + AND queue_name = ANY($1) + ORDER BY queue_job_id ASC + LIMIT $2 + FOR UPDATE + SKIP LOCKED + ) + -- then we update the status of those jobs to 'running', returning the job details + UPDATE queue_jobs + SET status = 'running', started_at = $3, started_by = $4 + FROM locked_jobs + WHERE queue_jobs.queue_job_id = locked_jobs.queue_job_id + RETURNING + queue_jobs.queue_job_id, + queue_jobs.queue_name, + queue_jobs.payload, + queue_jobs.metadata, + queue_jobs.attempt + "#, + &queues, + max_count, + now, + Uuid::from(worker.id), + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let jobs = results + .into_iter() + .map(TryFrom::try_from) + .collect::, _>>()?; + + Ok(jobs) + } + + #[tracing::instrument( + name = "db.queue_job.mark_as_completed", + skip_all, + fields( + db.query.text, + job.id = %id, + ), + err, + )] + async fn mark_as_completed(&mut self, clock: &dyn Clock, id: Ulid) -> Result<(), Self::Error> { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE queue_jobs + SET status = 'completed', completed_at = $1 + WHERE queue_job_id = $2 AND status = 'running' + "#, + now, + Uuid::from(id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_job.mark_as_failed", + skip_all, + fields( + db.query.text, + job.id = %id, + ), + err + )] + async fn mark_as_failed( + &mut self, + clock: &dyn Clock, + id: Ulid, + reason: &str, + ) -> Result<(), Self::Error> { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE queue_jobs + SET + status = 'failed', + failed_at = $1, + failed_reason = $2 + WHERE + queue_job_id = $3 + AND status = 'running' + "#, + now, + reason, + Uuid::from(id), + ) + .traced() + .execute(&mut 
*self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_job.retry", + skip_all, + fields( + db.query.text, + job.id = %id, + ), + err + )] + async fn retry( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + id: Ulid, + delay: Duration, + ) -> Result<(), Self::Error> { + let now = clock.now(); + let scheduled_at = now + delay; + let new_id = Ulid::from_datetime_with_source(now.into(), rng); + + let span = tracing::info_span!( + "db.queue_job.retry.insert_job", + { DB_QUERY_TEXT } = tracing::field::Empty + ); + // Create a new job with the same payload and metadata, but a new ID and + // increment the attempt + // We make sure we do this only for 'failed' jobs + let res = sqlx::query!( + r#" + INSERT INTO queue_jobs + (queue_job_id, queue_name, payload, metadata, created_at, + attempt, scheduled_at, schedule_name, status) + SELECT $1, queue_name, payload, metadata, $2, attempt + 1, $3, schedule_name, 'scheduled' + FROM queue_jobs + WHERE queue_job_id = $4 + AND status = 'failed' + "#, + Uuid::from(new_id), + now, + scheduled_at, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + // If that job was referenced by a schedule, update the schedule + let span = tracing::info_span!( + "db.queue_job.retry.update_schedule", + { DB_QUERY_TEXT } = tracing::field::Empty + ); + sqlx::query!( + r#" + UPDATE queue_schedules + SET last_scheduled_at = $1, + last_scheduled_job_id = $2 + WHERE last_scheduled_job_id = $3 + "#, + scheduled_at, + Uuid::from(new_id), + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + // Update the old job to point to the new attempt + let span = tracing::info_span!( + "db.queue_job.retry.update_old_job", + { DB_QUERY_TEXT } = tracing::field::Empty + ); + let res = sqlx::query!( + r#" + UPDATE queue_jobs + SET 
next_attempt_id = $1 + WHERE queue_job_id = $2 + "#, + Uuid::from(new_id), + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_job.schedule_available_jobs", + skip_all, + fields( + db.query.text, + ), + err + )] + async fn schedule_available_jobs(&mut self, clock: &dyn Clock) -> Result { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE queue_jobs + SET status = 'available' + WHERE + status = 'scheduled' + AND scheduled_at <= $1 + "#, + now, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let count = res.rows_affected(); + Ok(usize::try_from(count).unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.queue_job.cleanup", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // Use ULID cursor-based pagination for completed and failed jobs. + // We delete both completed and failed jobs in the same batch. + // `MAX(uuid)` isn't a thing in Postgres, so we aggregate on the client side. 
+ let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT queue_job_id + FROM queue_jobs + WHERE (status = 'completed' OR status = 'failed') + AND ($1::uuid IS NULL OR queue_job_id > $1) + AND queue_job_id <= $2 + ORDER BY queue_job_id + LIMIT $3 + ) + DELETE FROM queue_jobs + USING to_delete + WHERE queue_jobs.queue_job_id = to_delete.queue_job_id + RETURNING queue_jobs.queue_job_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/queue/mod.rs b/matrix-authentication-service/crates/storage-pg/src/queue/mod.rs new file mode 100644 index 00000000..d3570e59 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/queue/mod.rs @@ -0,0 +1,10 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the job queue + +pub mod job; +pub mod schedule; +pub mod worker; diff --git a/matrix-authentication-service/crates/storage-pg/src/queue/schedule.rs b/matrix-authentication-service/crates/storage-pg/src/queue/schedule.rs new file mode 100644 index 00000000..fd28ef14 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/queue/schedule.rs @@ -0,0 +1,86 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the +//! [`QueueScheduleRepository`]. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_storage::queue::{QueueScheduleRepository, ScheduleStatus}; +use sqlx::PgConnection; + +use crate::{DatabaseError, ExecuteExt}; + +/// An implementation of [`QueueScheduleRepository`] for a PostgreSQL +/// connection. +pub struct PgQueueScheduleRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgQueueScheduleRepository<'c> { + /// Create a new [`PgQueueScheduleRepository`] from an active PostgreSQL + /// connection. + #[must_use] + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct ScheduleLookup { + schedule_name: String, + last_scheduled_at: Option>, + last_scheduled_job_completed: Option, +} + +impl From for ScheduleStatus { + fn from(value: ScheduleLookup) -> Self { + ScheduleStatus { + schedule_name: value.schedule_name, + last_scheduled_at: value.last_scheduled_at, + last_scheduled_job_completed: value.last_scheduled_job_completed, + } + } +} + +#[async_trait] +impl QueueScheduleRepository for PgQueueScheduleRepository<'_> { + type Error = DatabaseError; + + async fn setup(&mut self, schedules: &[&'static str]) -> Result<(), Self::Error> { + sqlx::query!( + r#" + INSERT INTO queue_schedules (schedule_name) + SELECT * FROM UNNEST($1::text[]) AS t (schedule_name) + ON CONFLICT (schedule_name) DO NOTHING + "#, + &schedules.iter().map(|&s| s.to_owned()).collect::>(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } + + async fn list(&mut self) -> Result, Self::Error> { + let res = sqlx::query_as!( + ScheduleLookup, + r#" + SELECT + queue_schedules.schedule_name as "schedule_name!", + queue_schedules.last_scheduled_at, + queue_jobs.status IN ('completed', 'failed') as last_scheduled_job_completed + FROM queue_schedules + LEFT JOIN queue_jobs + ON queue_jobs.queue_job_id = queue_schedules.last_scheduled_job_id + "# + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + Ok(res.into_iter().map(Into::into).collect()) + } +} diff 
--git a/matrix-authentication-service/crates/storage-pg/src/queue/worker.rs b/matrix-authentication-service/crates/storage-pg/src/queue/worker.rs new file mode 100644 index 00000000..c7dbc1f8 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/queue/worker.rs @@ -0,0 +1,259 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the +//! [`QueueWorkerRepository`]. + +use async_trait::async_trait; +use chrono::Duration; +use mas_data_model::Clock; +use mas_storage::queue::{QueueWorkerRepository, Worker}; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, ExecuteExt}; + +/// An implementation of [`QueueWorkerRepository`] for a PostgreSQL connection. +pub struct PgQueueWorkerRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgQueueWorkerRepository<'c> { + /// Create a new [`PgQueueWorkerRepository`] from an active PostgreSQL + /// connection. 
+ #[must_use] + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[async_trait] +impl QueueWorkerRepository for PgQueueWorkerRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.queue_worker.register", + skip_all, + fields( + worker.id, + db.query.text, + ), + err, + )] + async fn register( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + ) -> Result { + let now = clock.now(); + let worker_id = Ulid::from_datetime_with_source(now.into(), rng); + tracing::Span::current().record("worker.id", tracing::field::display(worker_id)); + + sqlx::query!( + r#" + INSERT INTO queue_workers (queue_worker_id, registered_at, last_seen_at) + VALUES ($1, $2, $2) + "#, + Uuid::from(worker_id), + now, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(Worker { id: worker_id }) + } + + #[tracing::instrument( + name = "db.queue_worker.heartbeat", + skip_all, + fields( + %worker.id, + db.query.text, + ), + err, + )] + async fn heartbeat(&mut self, clock: &dyn Clock, worker: &Worker) -> Result<(), Self::Error> { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE queue_workers + SET last_seen_at = $2 + WHERE queue_worker_id = $1 AND shutdown_at IS NULL + "#, + Uuid::from(worker.id), + now, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // If no row was updated, the worker was shutdown so we return an error + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_worker.shutdown", + skip_all, + fields( + %worker.id, + db.query.text, + ), + err, + )] + async fn shutdown(&mut self, clock: &dyn Clock, worker: &Worker) -> Result<(), Self::Error> { + let now = clock.now(); + let res = sqlx::query!( + r#" + UPDATE queue_workers + SET shutdown_at = $2 + WHERE queue_worker_id = $1 + "#, + Uuid::from(worker.id), + now, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + // 
Remove the leader lease if we were holding it + let res = sqlx::query!( + r#" + DELETE FROM queue_leader + WHERE queue_worker_id = $1 + "#, + Uuid::from(worker.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // If we were holding the leader lease, notify workers + if res.rows_affected() > 0 { + sqlx::query!( + r#" + NOTIFY queue_leader_stepdown + "#, + ) + .traced() + .execute(&mut *self.conn) + .await?; + } + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_worker.shutdown_dead_workers", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn shutdown_dead_workers( + &mut self, + clock: &dyn Clock, + threshold: Duration, + ) -> Result<(), Self::Error> { + // Here the threshold is usually set to a few minutes, so we don't need to use + // the database time, as we can assume worker clocks have less than a minute + // skew between each other, else other things would break + let now = clock.now(); + sqlx::query!( + r#" + UPDATE queue_workers + SET shutdown_at = $1 + WHERE shutdown_at IS NULL + AND last_seen_at < $2 + "#, + now, + now - threshold, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_worker.remove_leader_lease_if_expired", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn remove_leader_lease_if_expired( + &mut self, + _clock: &dyn Clock, + ) -> Result<(), Self::Error> { + // `expires_at` is a rare exception where we use the database time, as this + // would be very sensitive to clock skew between workers + sqlx::query!( + r#" + DELETE FROM queue_leader + WHERE expires_at < NOW() + "#, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.queue_worker.try_get_leader_lease", + skip_all, + fields( + %worker.id, + db.query.text, + ), + err, + )] + async fn try_get_leader_lease( + &mut self, + clock: &dyn Clock, + worker: &Worker, + ) -> Result { + let now = clock.now(); + // The 
queue_leader table is meant to only have a single row, which conflicts on + // the `active` column + + // If there is a conflict, we update the `expires_at` column ONLY IF the current + // leader is ourselves. + + // `expires_at` is a rare exception where we use the database time, as this + // would be very sensitive to clock skew between workers + let res = sqlx::query!( + r#" + INSERT INTO queue_leader (elected_at, expires_at, queue_worker_id) + VALUES ($1, NOW() + INTERVAL '5 seconds', $2) + ON CONFLICT (active) + DO UPDATE SET expires_at = EXCLUDED.expires_at + WHERE queue_leader.queue_worker_id = $2 + "#, + now, + Uuid::from(worker.id) + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // We can then detect whether we are the leader or not by checking how many rows + // were affected by the upsert + let am_i_the_leader = res.rows_affected() == 1; + + Ok(am_i_the_leader) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/repository.rs b/matrix-authentication-service/crates/storage-pg/src/repository.rs new file mode 100644 index 00000000..210d66a0 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/repository.rs @@ -0,0 +1,363 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::ops::{Deref, DerefMut}; + +use async_trait::async_trait; +use futures_util::{FutureExt, TryFutureExt, future::BoxFuture}; +use mas_storage::{ + BoxRepository, BoxRepositoryFactory, MapErr, Repository, RepositoryAccess, RepositoryError, + RepositoryFactory, RepositoryTransaction, + app_session::AppSessionRepository, + compat::{ + CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository, + CompatSsoLoginRepository, + }, + oauth2::{ + OAuth2AccessTokenRepository, OAuth2AuthorizationGrantRepository, OAuth2ClientRepository, + OAuth2DeviceCodeGrantRepository, OAuth2RefreshTokenRepository, OAuth2SessionRepository, + }, + personal::PersonalSessionRepository, + policy_data::PolicyDataRepository, + queue::{QueueJobRepository, QueueScheduleRepository, QueueWorkerRepository}, + upstream_oauth2::{ + UpstreamOAuthLinkRepository, UpstreamOAuthProviderRepository, + UpstreamOAuthSessionRepository, + }, + user::{ + BrowserSessionRepository, UserEmailRepository, UserPasswordRepository, + UserRecoveryRepository, UserRegistrationRepository, UserRegistrationTokenRepository, + UserRepository, UserTermsRepository, + }, +}; +use sqlx::{PgConnection, PgPool, Postgres, Transaction}; +use tracing::Instrument; + +use crate::{ + DatabaseError, + app_session::PgAppSessionRepository, + compat::{ + PgCompatAccessTokenRepository, PgCompatRefreshTokenRepository, PgCompatSessionRepository, + PgCompatSsoLoginRepository, + }, + oauth2::{ + PgOAuth2AccessTokenRepository, PgOAuth2AuthorizationGrantRepository, + PgOAuth2ClientRepository, PgOAuth2DeviceCodeGrantRepository, + PgOAuth2RefreshTokenRepository, PgOAuth2SessionRepository, + }, + personal::{PgPersonalAccessTokenRepository, PgPersonalSessionRepository}, + policy_data::PgPolicyDataRepository, + queue::{ + job::PgQueueJobRepository, schedule::PgQueueScheduleRepository, + worker::PgQueueWorkerRepository, + }, + telemetry::DB_CLIENT_CONNECTIONS_CREATE_TIME_HISTOGRAM, + upstream_oauth2::{ + 
PgUpstreamOAuthLinkRepository, PgUpstreamOAuthProviderRepository, + PgUpstreamOAuthSessionRepository, + }, + user::{ + PgBrowserSessionRepository, PgUserEmailRepository, PgUserPasswordRepository, + PgUserRecoveryRepository, PgUserRegistrationRepository, PgUserRegistrationTokenRepository, + PgUserRepository, PgUserTermsRepository, + }, +}; + +/// An implementation of the [`RepositoryFactory`] trait backed by a PostgreSQL +/// connection pool. +#[derive(Clone)] +pub struct PgRepositoryFactory { + pool: PgPool, +} + +impl PgRepositoryFactory { + /// Create a new [`PgRepositoryFactory`] from a PostgreSQL connection pool. + #[must_use] + pub fn new(pool: PgPool) -> Self { + Self { pool } + } + + /// Box the factory + #[must_use] + pub fn boxed(self) -> BoxRepositoryFactory { + Box::new(self) + } + + /// Get the underlying PostgreSQL connection pool + #[must_use] + pub fn pool(&self) -> PgPool { + self.pool.clone() + } +} + +#[async_trait] +impl RepositoryFactory for PgRepositoryFactory { + async fn create(&self) -> Result { + let start = std::time::Instant::now(); + let repo = PgRepository::from_pool(&self.pool) + .await + .map_err(RepositoryError::from_error)? + .boxed(); + + // Measure the time it took to create the connection + let duration = start.elapsed(); + let duration_ms = duration.as_millis().try_into().unwrap_or(u64::MAX); + DB_CLIENT_CONNECTIONS_CREATE_TIME_HISTOGRAM.record(duration_ms, &[]); + + Ok(repo) + } +} + +/// An implementation of the [`Repository`] trait backed by a PostgreSQL +/// transaction. +pub struct PgRepository> { + conn: C, +} + +impl PgRepository { + /// Create a new [`PgRepository`] from a PostgreSQL connection pool, + /// starting a transaction. + /// + /// # Errors + /// + /// Returns a [`DatabaseError`] if the transaction could not be started. 
+ pub async fn from_pool(pool: &PgPool) -> Result { + let txn = pool.begin().await?; + Ok(Self::from_conn(txn)) + } + + /// Transform the repository into a type-erased [`BoxRepository`] + pub fn boxed(self) -> BoxRepository { + Box::new(MapErr::new(self, RepositoryError::from_error)) + } +} + +impl PgRepository { + /// Create a new [`PgRepository`] from an existing PostgreSQL connection + /// with a transaction + pub fn from_conn(conn: C) -> Self { + PgRepository { conn } + } + + /// Consume this [`PgRepository`], returning the underlying connection. + pub fn into_inner(self) -> C { + self.conn + } +} + +impl AsRef for PgRepository { + fn as_ref(&self) -> &C { + &self.conn + } +} + +impl AsMut for PgRepository { + fn as_mut(&mut self) -> &mut C { + &mut self.conn + } +} + +impl Deref for PgRepository { + type Target = C; + + fn deref(&self) -> &Self::Target { + &self.conn + } +} + +impl DerefMut for PgRepository { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.conn + } +} + +impl Repository for PgRepository {} + +impl RepositoryTransaction for PgRepository { + type Error = DatabaseError; + + fn save(self: Box) -> BoxFuture<'static, Result<(), Self::Error>> { + let span = tracing::info_span!("db.save"); + self.conn + .commit() + .map_err(DatabaseError::from) + .instrument(span) + .boxed() + } + + fn cancel(self: Box) -> BoxFuture<'static, Result<(), Self::Error>> { + let span = tracing::info_span!("db.cancel"); + self.conn + .rollback() + .map_err(DatabaseError::from) + .instrument(span) + .boxed() + } +} + +impl RepositoryAccess for PgRepository +where + C: AsMut + Send, +{ + type Error = DatabaseError; + + fn upstream_oauth_link<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUpstreamOAuthLinkRepository::new(self.conn.as_mut())) + } + + fn upstream_oauth_provider<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUpstreamOAuthProviderRepository::new(self.conn.as_mut())) + } + + fn upstream_oauth_session<'c>( + &'c mut self, + ) -> Box + 'c> { 
+ Box::new(PgUpstreamOAuthSessionRepository::new(self.conn.as_mut())) + } + + fn user<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgUserRepository::new(self.conn.as_mut())) + } + + fn user_email<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgUserEmailRepository::new(self.conn.as_mut())) + } + + fn user_password<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUserPasswordRepository::new(self.conn.as_mut())) + } + + fn user_recovery<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUserRecoveryRepository::new(self.conn.as_mut())) + } + + fn user_terms<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgUserTermsRepository::new(self.conn.as_mut())) + } + + fn user_registration<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUserRegistrationRepository::new(self.conn.as_mut())) + } + + fn user_registration_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgUserRegistrationTokenRepository::new(self.conn.as_mut())) + } + + fn browser_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgBrowserSessionRepository::new(self.conn.as_mut())) + } + + fn app_session<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgAppSessionRepository::new(self.conn.as_mut())) + } + + fn oauth2_client<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgOAuth2ClientRepository::new(self.conn.as_mut())) + } + + fn oauth2_authorization_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgOAuth2AuthorizationGrantRepository::new( + self.conn.as_mut(), + )) + } + + fn oauth2_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgOAuth2SessionRepository::new(self.conn.as_mut())) + } + + fn oauth2_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgOAuth2AccessTokenRepository::new(self.conn.as_mut())) + } + + fn oauth2_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgOAuth2RefreshTokenRepository::new(self.conn.as_mut())) + } + + fn oauth2_device_code_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + 
Box::new(PgOAuth2DeviceCodeGrantRepository::new(self.conn.as_mut())) + } + + fn compat_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgCompatSessionRepository::new(self.conn.as_mut())) + } + + fn compat_sso_login<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgCompatSsoLoginRepository::new(self.conn.as_mut())) + } + + fn compat_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgCompatAccessTokenRepository::new(self.conn.as_mut())) + } + + fn compat_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgCompatRefreshTokenRepository::new(self.conn.as_mut())) + } + + fn personal_access_token<'c>( + &'c mut self, + ) -> Box + 'c> + { + Box::new(PgPersonalAccessTokenRepository::new(self.conn.as_mut())) + } + + fn personal_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgPersonalSessionRepository::new(self.conn.as_mut())) + } + + fn queue_worker<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgQueueWorkerRepository::new(self.conn.as_mut())) + } + + fn queue_job<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgQueueJobRepository::new(self.conn.as_mut())) + } + + fn queue_schedule<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(PgQueueScheduleRepository::new(self.conn.as_mut())) + } + + fn policy_data<'c>(&'c mut self) -> Box + 'c> { + Box::new(PgPolicyDataRepository::new(self.conn.as_mut())) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/telemetry.rs b/matrix-authentication-service/crates/storage-pg/src/telemetry.rs new file mode 100644 index 00000000..4771f22c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/telemetry.rs @@ -0,0 +1,31 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::LazyLock; + +use opentelemetry::{ + InstrumentationScope, + metrics::{Histogram, Meter}, +}; +use opentelemetry_semantic_conventions as semcov; + +static SCOPE: LazyLock = LazyLock::new(|| { + InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(semcov::SCHEMA_URL) + .build() +}); + +static METER: LazyLock = + LazyLock::new(|| opentelemetry::global::meter_with_scope(SCOPE.clone())); + +pub(crate) static DB_CLIENT_CONNECTIONS_CREATE_TIME_HISTOGRAM: LazyLock> = + LazyLock::new(|| { + METER + .u64_histogram("db.client.connections.create_time") + .with_description("The time it took to create a new connection.") + .with_unit("ms") + .build() + }); diff --git a/matrix-authentication-service/crates/storage-pg/src/tracing.rs b/matrix-authentication-service/crates/storage-pg/src/tracing.rs new file mode 100644 index 00000000..4cab1fd7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/tracing.rs @@ -0,0 +1,33 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use opentelemetry_semantic_conventions::attribute::DB_QUERY_TEXT; +use tracing::Span; + +/// An extension trait for [`sqlx::Execute`] that records the SQL statement as +/// `db.query.text` in a tracing span +pub trait ExecuteExt<'q, DB>: Sized { + /// Records the statement as `db.query.text` in the current span + #[must_use] + fn traced(self) -> Self { + self.record(&Span::current()) + } + + /// Records the statement as `db.query.text` in the given span + #[must_use] + fn record(self, span: &Span) -> Self; +} + +impl<'q, DB, T> ExecuteExt<'q, DB> for T +where + T: sqlx::Execute<'q, DB>, + DB: sqlx::Database, +{ + fn record(self, span: &Span) -> Self { + span.record(DB_QUERY_TEXT, self.sql()); + self + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/link.rs b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/link.rs new file mode 100644 index 00000000..8814bc3e --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/link.rs @@ -0,0 +1,502 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{Clock, UpstreamOAuthLink, UpstreamOAuthProvider, User}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + upstream_oauth2::{UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository}, +}; +use opentelemetry_semantic_conventions::trace::DB_QUERY_TEXT; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use tracing::Instrument; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, + filter::{Filter, StatementExt}, + iden::{UpstreamOAuthLinks, UpstreamOAuthProviders}, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`UpstreamOAuthLinkRepository`] for a PostgreSQL +/// connection +pub struct PgUpstreamOAuthLinkRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUpstreamOAuthLinkRepository<'c> { + /// Create a new [`PgUpstreamOAuthLinkRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct LinkLookup { + upstream_oauth_link_id: Uuid, + upstream_oauth_provider_id: Uuid, + user_id: Option, + subject: String, + human_account_name: Option, + created_at: DateTime, +} + +impl Node for LinkLookup { + fn cursor(&self) -> Ulid { + self.upstream_oauth_link_id.into() + } +} + +impl From for UpstreamOAuthLink { + fn from(value: LinkLookup) -> Self { + UpstreamOAuthLink { + id: Ulid::from(value.upstream_oauth_link_id), + provider_id: Ulid::from(value.upstream_oauth_provider_id), + user_id: value.user_id.map(Ulid::from), + subject: value.subject, + human_account_name: value.human_account_name, + created_at: value.created_at, + } + } +} + +impl Filter for UpstreamOAuthLinkFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + 
.add_option(self.user().map(|user| { + Expr::col((UpstreamOAuthLinks::Table, UpstreamOAuthLinks::UserId)) + .eq(Uuid::from(user.id)) + })) + .add_option(self.provider().map(|provider| { + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthProviderId, + )) + .eq(Uuid::from(provider.id)) + })) + .add_option(self.provider_enabled().map(|enabled| { + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthProviderId, + )) + .eq(Expr::any( + Query::select() + .expr(Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UpstreamOAuthProviderId, + ))) + .from(UpstreamOAuthProviders::Table) + .and_where( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::DisabledAt, + )) + .is_null() + .eq(enabled), + ) + .take(), + )) + })) + .add_option(self.subject().map(|subject| { + Expr::col((UpstreamOAuthLinks::Table, UpstreamOAuthLinks::Subject)).eq(subject) + })) + } +} + +#[async_trait] +impl UpstreamOAuthLinkRepository for PgUpstreamOAuthLinkRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.upstream_oauth_link.lookup", + skip_all, + fields( + db.query.text, + upstream_oauth_link.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + LinkLookup, + r#" + SELECT + upstream_oauth_link_id, + upstream_oauth_provider_id, + user_id, + subject, + human_account_name, + created_at + FROM upstream_oauth_links + WHERE upstream_oauth_link_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await? 
+ .map(Into::into); + + Ok(res) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.find_by_subject", + skip_all, + fields( + db.query.text, + upstream_oauth_link.subject = subject, + %upstream_oauth_provider.id, + upstream_oauth_provider.issuer = upstream_oauth_provider.issuer, + %upstream_oauth_provider.client_id, + ), + err, + )] + async fn find_by_subject( + &mut self, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + LinkLookup, + r#" + SELECT + upstream_oauth_link_id, + upstream_oauth_provider_id, + user_id, + subject, + human_account_name, + created_at + FROM upstream_oauth_links + WHERE upstream_oauth_provider_id = $1 + AND subject = $2 + "#, + Uuid::from(upstream_oauth_provider.id), + subject, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await? + .map(Into::into); + + Ok(res) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.add", + skip_all, + fields( + db.query.text, + upstream_oauth_link.id, + upstream_oauth_link.subject = subject, + upstream_oauth_link.human_account_name = human_account_name, + %upstream_oauth_provider.id, + upstream_oauth_provider.issuer = upstream_oauth_provider.issuer, + %upstream_oauth_provider.client_id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: String, + human_account_name: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("upstream_oauth_link.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO upstream_oauth_links ( + upstream_oauth_link_id, + upstream_oauth_provider_id, + user_id, + subject, + human_account_name, + created_at + ) VALUES ($1, $2, NULL, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(upstream_oauth_provider.id), + &subject, + 
human_account_name.as_deref(), + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UpstreamOAuthLink { + id, + provider_id: upstream_oauth_provider.id, + user_id: None, + subject, + human_account_name, + created_at, + }) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.associate_to_user", + skip_all, + fields( + db.query.text, + %upstream_oauth_link.id, + %upstream_oauth_link.subject, + %user.id, + %user.username, + ), + err, + )] + async fn associate_to_user( + &mut self, + upstream_oauth_link: &UpstreamOAuthLink, + user: &User, + ) -> Result<(), Self::Error> { + sqlx::query!( + r#" + UPDATE upstream_oauth_links + SET user_id = $1 + WHERE upstream_oauth_link_id = $2 + "#, + Uuid::from(user.id), + Uuid::from(upstream_oauth_link.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UpstreamOAuthLinkFilter<'_>, + pagination: Pagination, + ) -> Result, DatabaseError> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthLinkId, + )), + LinkLookupIden::UpstreamOauthLinkId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthProviderId, + )), + LinkLookupIden::UpstreamOauthProviderId, + ) + .expr_as( + Expr::col((UpstreamOAuthLinks::Table, UpstreamOAuthLinks::UserId)), + LinkLookupIden::UserId, + ) + .expr_as( + Expr::col((UpstreamOAuthLinks::Table, UpstreamOAuthLinks::Subject)), + LinkLookupIden::Subject, + ) + .expr_as( + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::HumanAccountName, + )), + LinkLookupIden::HumanAccountName, + ) + .expr_as( + Expr::col((UpstreamOAuthLinks::Table, UpstreamOAuthLinks::CreatedAt)), + LinkLookupIden::CreatedAt, + ) + .from(UpstreamOAuthLinks::Table) + .apply_filter(filter) + 
.generate_pagination( + ( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthLinkId, + ), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).map(UpstreamOAuthLink::from); + + Ok(page) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: UpstreamOAuthLinkFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr( + Expr::col(( + UpstreamOAuthLinks::Table, + UpstreamOAuthLinks::UpstreamOAuthLinkId, + )) + .count(), + ) + .from(UpstreamOAuthLinks::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.remove", + skip_all, + fields( + db.query.text, + upstream_oauth_link.id, + upstream_oauth_link.provider_id, + %upstream_oauth_link.subject, + ), + err, + )] + async fn remove( + &mut self, + clock: &dyn Clock, + upstream_oauth_link: UpstreamOAuthLink, + ) -> Result<(), Self::Error> { + // Unlink the authorization sessions first, as they have a foreign key + // constraint on the links. 
+ let span = tracing::info_span!( + "db.upstream_oauth_link.remove.unlink", + { DB_QUERY_TEXT } = tracing::field::Empty + ); + sqlx::query!( + r#" + UPDATE upstream_oauth_authorization_sessions SET + upstream_oauth_link_id = NULL, + unlinked_at = $2 + WHERE upstream_oauth_link_id = $1 + "#, + Uuid::from(upstream_oauth_link.id), + clock.now() + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + // Then delete the link itself + let span = tracing::info_span!( + "db.upstream_oauth_link.remove.delete", + { DB_QUERY_TEXT } = tracing::field::Empty + ); + let res = sqlx::query!( + r#" + DELETE FROM upstream_oauth_links + WHERE upstream_oauth_link_id = $1 + "#, + Uuid::from(upstream_oauth_link.id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.upstream_oauth_link.cleanup_orphaned", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // Use ULID cursor-based pagination for orphaned links only. + // We only delete links that have no user associated with them. + // `MAX(uuid)` isn't a thing in Postgres, so we aggregate on the client side. 
+ let res = sqlx::query_scalar!( + r#" + WITH + to_delete AS ( + SELECT upstream_oauth_link_id + FROM upstream_oauth_links + WHERE user_id IS NULL + AND ($1::uuid IS NULL OR upstream_oauth_link_id > $1) + AND upstream_oauth_link_id <= $2 + ORDER BY upstream_oauth_link_id + LIMIT $3 + ), + deleted_sessions AS ( + DELETE FROM upstream_oauth_authorization_sessions + USING to_delete + WHERE upstream_oauth_authorization_sessions.upstream_oauth_link_id = to_delete.upstream_oauth_link_id + ) + DELETE FROM upstream_oauth_links + USING to_delete + WHERE upstream_oauth_links.upstream_oauth_link_id = to_delete.upstream_oauth_link_id + RETURNING upstream_oauth_links.upstream_oauth_link_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/mod.rs b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/mod.rs new file mode 100644 index 00000000..12df9d5f --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/mod.rs @@ -0,0 +1,668 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the repositories +//! 
related to the upstream OAuth 2.0 providers + +mod link; +mod provider; +mod session; + +pub use self::{ + link::PgUpstreamOAuthLinkRepository, provider::PgUpstreamOAuthProviderRepository, + session::PgUpstreamOAuthSessionRepository, +}; + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{ + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, clock::MockClock, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_storage::{ + Pagination, RepositoryAccess, + upstream_oauth2::{ + UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository, UpstreamOAuthProviderFilter, + UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository, + UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository, + }, + user::UserRepository, + }; + use oauth2_types::scope::{OPENID, Scope}; + use rand::SeedableRng; + use sqlx::PgPool; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_repository(pool: PgPool) { + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // The provider list should be empty at the start + let all_providers = repo.upstream_oauth_provider().all_enabled().await.unwrap(); + assert!(all_providers.is_empty()); + + // Let's add a provider + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: None, + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + token_endpoint_signing_alg: None, + client_id: "client-id".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), 
+ token_endpoint_override: None, + authorization_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + // Look it up in the database + let provider = repo + .upstream_oauth_provider() + .lookup(provider.id) + .await + .unwrap() + .expect("provider to be found in the database"); + assert_eq!(provider.issuer.as_deref(), Some("https://example.com/")); + assert_eq!(provider.client_id, "client-id"); + + // It should be in the list of all providers + let providers = repo.upstream_oauth_provider().all_enabled().await.unwrap(); + assert_eq!(providers.len(), 1); + assert_eq!(providers[0].issuer.as_deref(), Some("https://example.com/")); + assert_eq!(providers[0].client_id, "client-id"); + + // Start a session + let session = repo + .upstream_oauth_session() + .add( + &mut rng, + &clock, + &provider, + "some-state".to_owned(), + None, + Some("some-nonce".to_owned()), + ) + .await + .unwrap(); + + // Look it up in the database + let session = repo + .upstream_oauth_session() + .lookup(session.id) + .await + .unwrap() + .expect("session to be found in the database"); + assert_eq!(session.provider_id, provider.id); + assert_eq!(session.link_id(), None); + assert!(session.is_pending()); + assert!(!session.is_completed()); + assert!(!session.is_consumed()); + + // Create a link + let link = repo + .upstream_oauth_link() + .add(&mut rng, &clock, &provider, "a-subject".to_owned(), None) + .await + .unwrap(); + + // We can look it up by its ID + repo.upstream_oauth_link() + .lookup(link.id) + .await + .unwrap() + .expect("link to be found in database"); + + // or by its subject + let 
link = repo + .upstream_oauth_link() + .find_by_subject(&provider, "a-subject") + .await + .unwrap() + .expect("link to be found in database"); + assert_eq!(link.subject, "a-subject"); + assert_eq!(link.provider_id, provider.id); + + let session = repo + .upstream_oauth_session() + .complete_with_link(&clock, session, &link, None, None, None, None) + .await + .unwrap(); + // Reload the session + let session = repo + .upstream_oauth_session() + .lookup(session.id) + .await + .unwrap() + .expect("session to be found in the database"); + assert!(session.is_completed()); + assert!(!session.is_consumed()); + assert_eq!(session.link_id(), Some(link.id)); + + // We need to create a user and start a browser session to consume the session + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + let browser_session = repo + .browser_session() + .add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .consume(&clock, session, &browser_session) + .await + .unwrap(); + + // Reload the session + let session = repo + .upstream_oauth_session() + .lookup(session.id) + .await + .unwrap() + .expect("session to be found in the database"); + assert!(session.is_consumed()); + + repo.upstream_oauth_link() + .associate_to_user(&link, &user) + .await + .unwrap(); + + // XXX: we should also try other combinations of the filter + let filter = UpstreamOAuthLinkFilter::new() + .for_user(&user) + .for_provider(&provider) + .for_subject("a-subject") + .enabled_providers_only(); + + let links = repo + .upstream_oauth_link() + .list(filter, Pagination::first(10)) + .await + .unwrap(); + assert!(!links.has_previous_page); + assert!(!links.has_next_page); + assert_eq!(links.edges.len(), 1); + assert_eq!(links.edges[0].node.id, link.id); + assert_eq!(links.edges[0].node.user_id, Some(user.id)); + + assert_eq!(repo.upstream_oauth_link().count(filter).await.unwrap(), 1); + + // There should be exactly one 
enabled provider + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new()) + .await + .unwrap(), + 1 + ); + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new().enabled_only()) + .await + .unwrap(), + 1 + ); + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new().disabled_only()) + .await + .unwrap(), + 0 + ); + + // Disable the provider + repo.upstream_oauth_provider() + .disable(&clock, provider.clone()) + .await + .unwrap(); + + // There should be exactly one disabled provider + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new()) + .await + .unwrap(), + 1 + ); + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new().enabled_only()) + .await + .unwrap(), + 0 + ); + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new().disabled_only()) + .await + .unwrap(), + 1 + ); + + // Test listing and counting sessions + let session_filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + + // Count the sessions for the provider + let session_count = repo + .upstream_oauth_session() + .count(session_filter) + .await + .unwrap(); + assert_eq!(session_count, 1); + + // List the sessions for the provider + let session_page = repo + .upstream_oauth_session() + .list(session_filter, Pagination::first(10)) + .await + .unwrap(); + + assert_eq!(session_page.edges.len(), 1); + assert_eq!(session_page.edges[0].node.id, session.id); + assert!(!session_page.has_next_page); + assert!(!session_page.has_previous_page); + + // Try deleting the provider + repo.upstream_oauth_provider() + .delete(provider) + .await + .unwrap(); + assert_eq!( + repo.upstream_oauth_provider() + .count(UpstreamOAuthProviderFilter::new()) + .await + .unwrap(), + 0 + ); + } + + /// Test that the pagination works as expected in the upstream OAuth + /// provider repository + #[sqlx::test(migrator 
= "crate::MIGRATOR")] + async fn test_provider_repository_pagination(pool: PgPool) { + let scope = Scope::from_iter([OPENID]); + + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + let filter = UpstreamOAuthProviderFilter::new(); + + // Count the number of providers before we start + assert_eq!( + repo.upstream_oauth_provider().count(filter).await.unwrap(), + 0 + ); + + let mut ids = Vec::with_capacity(20); + // Create 20 providers + for idx in 0..20 { + let client_id = format!("client-{idx}"); + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: None, + human_name: None, + brand_name: None, + scope: scope.clone(), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id, + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + token_endpoint_override: None, + authorization_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + ids.push(provider.id); + clock.advance(Duration::microseconds(10 * 1000 * 1000)); + } + + // Now we have 20 providers + assert_eq!( + repo.upstream_oauth_provider().count(filter).await.unwrap(), + 20 + ); + + // Lookup the first 10 items + let page = repo + .upstream_oauth_provider() + .list(filter, Pagination::first(10)) + .await + 
.unwrap(); + + // It returned the first 10 items + assert!(page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|p| p.node.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Getting the same page with the "enabled only" filter should return the same + // results + let other_page = repo + .upstream_oauth_provider() + .list(filter.enabled_only(), Pagination::first(10)) + .await + .unwrap(); + + assert_eq!(page, other_page); + + // Lookup the next 10 items + let page = repo + .upstream_oauth_provider() + .list(filter, Pagination::first(10).after(ids[9])) + .await + .unwrap(); + + // It returned the next 10 items + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|p| p.node.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the last 10 items + let page = repo + .upstream_oauth_provider() + .list(filter, Pagination::last(10)) + .await + .unwrap(); + + // It returned the last 10 items + assert!(page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|p| p.node.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the previous 10 items + let page = repo + .upstream_oauth_provider() + .list(filter, Pagination::last(10).before(ids[10])) + .await + .unwrap(); + + // It returned the previous 10 items + assert!(!page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|p| p.node.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Lookup 10 items between two IDs + let page = repo + .upstream_oauth_provider() + .list(filter, Pagination::first(10).after(ids[5]).before(ids[8])) + .await + .unwrap(); + + // It returned the items in between + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|p| p.node.id).collect(); + assert_eq!(&edge_ids, &ids[6..8]); + + // There should not be any disabled providers + assert!( + repo.upstream_oauth_provider() + .list( + UpstreamOAuthProviderFilter::new().disabled_only(), + Pagination::first(1) + ) + .await 
+ .unwrap() + .edges + .is_empty() + ); + } + + /// Test that the pagination works as expected in the upstream OAuth + /// session repository + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_session_repository_pagination(pool: PgPool) { + let scope = Scope::from_iter([OPENID]); + + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // Create a provider + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: None, + brand_name: None, + scope, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + token_endpoint_signing_alg: None, + client_id: "client-id".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + token_endpoint_override: None, + authorization_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + let filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + + // Count the number of sessions before we start + assert_eq!( + repo.upstream_oauth_session().count(filter).await.unwrap(), + 0 + ); + + let mut links = Vec::with_capacity(3); + for subject in ["alice", "bob", "charlie"] { + let link = repo + .upstream_oauth_link() + .add(&mut rng, &clock, &provider, subject.to_owned(), None) + .await + .unwrap(); + 
links.push(link); + } + + let mut ids = Vec::with_capacity(20); + let sids = ["one", "two"].into_iter().cycle(); + // Create 20 sessions + for (idx, (link, sid)) in links.iter().cycle().zip(sids).enumerate().take(20) { + let state = format!("state-{idx}"); + let session = repo + .upstream_oauth_session() + .add(&mut rng, &clock, &provider, state, None, None) + .await + .unwrap(); + let id_token_claims = serde_json::json!({ + "sub": link.subject, + "sid": sid, + "aud": provider.client_id, + "iss": "https://example.com/", + }); + let session = repo + .upstream_oauth_session() + .complete_with_link( + &clock, + session, + link, + None, + Some(id_token_claims), + None, + None, + ) + .await + .unwrap(); + ids.push(session.id); + clock.advance(Duration::microseconds(10 * 1000 * 1000)); + } + + // Now we have 20 sessions + assert_eq!( + repo.upstream_oauth_session().count(filter).await.unwrap(), + 20 + ); + + // Lookup the first 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10)) + .await + .unwrap(); + + // It returned the first 10 items + assert!(page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.node.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Lookup the next 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10).after(ids[9])) + .await + .unwrap(); + + // It returned the next 10 items + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.node.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the last 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::last(10)) + .await + .unwrap(); + + // It returned the last 10 items + assert!(page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.node.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the previous 10 items + let page = repo + .upstream_oauth_session() + .list(filter, 
Pagination::last(10).before(ids[10])) + .await + .unwrap(); + + // It returned the previous 10 items + assert!(!page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.node.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Lookup 5 items between two IDs + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10).after(ids[5]).before(ids[11])) + .await + .unwrap(); + + // It returned the items in between + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.node.id).collect(); + assert_eq!(&edge_ids, &ids[6..11]); + + // Check the sub/sid filters + assert_eq!( + repo.upstream_oauth_session() + .count(filter.with_sub_claim("alice").with_sid_claim("one")) + .await + .unwrap(), + 4 + ); + assert_eq!( + repo.upstream_oauth_session() + .count(filter.with_sub_claim("bob").with_sid_claim("two")) + .await + .unwrap(), + 4 + ); + + let page = repo + .upstream_oauth_session() + .list( + filter.with_sub_claim("alice").with_sid_claim("one"), + Pagination::first(10), + ) + .await + .unwrap(); + assert_eq!(page.edges.len(), 4); + for edge in page.edges { + assert_eq!( + edge.node + .id_token_claims() + .unwrap() + .get("sub") + .unwrap() + .as_str(), + Some("alice") + ); + assert_eq!( + edge.node + .id_token_claims() + .unwrap() + .get("sid") + .unwrap() + .as_str(), + Some("one") + ); + } + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/provider.rs b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/provider.rs new file mode 100644 index 00000000..caade738 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/provider.rs @@ -0,0 +1,984 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{Clock, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + upstream_oauth2::{ + UpstreamOAuthProviderFilter, UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository, + }, +}; +use opentelemetry_semantic_conventions::attribute::DB_QUERY_TEXT; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::{PgConnection, types::Json}; +use tracing::{Instrument, info_span}; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt}, + iden::UpstreamOAuthProviders, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`UpstreamOAuthProviderRepository`] for a PostgreSQL +/// connection +pub struct PgUpstreamOAuthProviderRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUpstreamOAuthProviderRepository<'c> { + /// Create a new [`PgUpstreamOAuthProviderRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(sqlx::FromRow)] +#[enum_def] +struct ProviderLookup { + upstream_oauth_provider_id: Uuid, + issuer: Option, + human_name: Option, + brand_name: Option, + scope: String, + client_id: String, + encrypted_client_secret: Option, + token_endpoint_signing_alg: Option, + token_endpoint_auth_method: String, + id_token_signed_response_alg: String, + fetch_userinfo: bool, + userinfo_signed_response_alg: Option, + created_at: DateTime, + disabled_at: Option>, + claims_imports: Json, + jwks_uri_override: Option, + authorization_endpoint_override: Option, + token_endpoint_override: Option, + userinfo_endpoint_override: Option, + discovery_mode: String, + pkce_mode: String, + response_mode: Option, + additional_parameters: Option>>, + forward_login_hint: bool, + 
on_backchannel_logout: String, +} + +impl Node for ProviderLookup { + fn cursor(&self) -> Ulid { + self.upstream_oauth_provider_id.into() + } +} + +impl TryFrom for UpstreamOAuthProvider { + type Error = DatabaseInconsistencyError; + + fn try_from(value: ProviderLookup) -> Result { + let id = value.upstream_oauth_provider_id.into(); + let scope = value.scope.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("scope") + .row(id) + .source(e) + })?; + let token_endpoint_auth_method = value.token_endpoint_auth_method.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("token_endpoint_auth_method") + .row(id) + .source(e) + })?; + let token_endpoint_signing_alg = value + .token_endpoint_signing_alg + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("token_endpoint_signing_alg") + .row(id) + .source(e) + })?; + let id_token_signed_response_alg = + value.id_token_signed_response_alg.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("id_token_signed_response_alg") + .row(id) + .source(e) + })?; + + let userinfo_signed_response_alg = value + .userinfo_signed_response_alg + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("userinfo_signed_response_alg") + .row(id) + .source(e) + })?; + + let authorization_endpoint_override = value + .authorization_endpoint_override + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("authorization_endpoint_override") + .row(id) + .source(e) + })?; + + let token_endpoint_override = value + .token_endpoint_override + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("token_endpoint_override") + .row(id) + .source(e) + })?; + + let 
userinfo_endpoint_override = value + .userinfo_endpoint_override + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("userinfo_endpoint_override") + .row(id) + .source(e) + })?; + + let jwks_uri_override = value + .jwks_uri_override + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("jwks_uri_override") + .row(id) + .source(e) + })?; + + let discovery_mode = value.discovery_mode.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("discovery_mode") + .row(id) + .source(e) + })?; + + let pkce_mode = value.pkce_mode.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("pkce_mode") + .row(id) + .source(e) + })?; + + let response_mode = value + .response_mode + .map(|x| x.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("response_mode") + .row(id) + .source(e) + })?; + + let additional_authorization_parameters = value + .additional_parameters + .map(|Json(x)| x) + .unwrap_or_default(); + + let on_backchannel_logout = value.on_backchannel_logout.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("on_backchannel_logout") + .row(id) + .source(e) + })?; + + Ok(UpstreamOAuthProvider { + id, + issuer: value.issuer, + human_name: value.human_name, + brand_name: value.brand_name, + scope, + client_id: value.client_id, + encrypted_client_secret: value.encrypted_client_secret, + token_endpoint_auth_method, + token_endpoint_signing_alg, + id_token_signed_response_alg, + fetch_userinfo: value.fetch_userinfo, + userinfo_signed_response_alg, + created_at: value.created_at, + disabled_at: value.disabled_at, + claims_imports: value.claims_imports.0, + authorization_endpoint_override, + token_endpoint_override, + userinfo_endpoint_override, + 
jwks_uri_override, + discovery_mode, + pkce_mode, + response_mode, + additional_authorization_parameters, + forward_login_hint: value.forward_login_hint, + on_backchannel_logout, + }) + } +} + +impl Filter for UpstreamOAuthProviderFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all().add_option(self.enabled().map(|enabled| { + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::DisabledAt, + )) + .is_null() + .eq(enabled) + })) + } +} + +#[async_trait] +impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.upstream_oauth_provider.lookup", + skip_all, + fields( + db.query.text, + upstream_oauth_provider.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + ProviderLookup, + r#" + SELECT + upstream_oauth_provider_id, + issuer, + human_name, + brand_name, + scope, + client_id, + encrypted_client_secret, + token_endpoint_signing_alg, + token_endpoint_auth_method, + id_token_signed_response_alg, + fetch_userinfo, + userinfo_signed_response_alg, + created_at, + disabled_at, + claims_imports as "claims_imports: Json", + jwks_uri_override, + authorization_endpoint_override, + token_endpoint_override, + userinfo_endpoint_override, + discovery_mode, + pkce_mode, + response_mode, + additional_parameters as "additional_parameters: Json>", + forward_login_hint, + on_backchannel_logout + FROM upstream_oauth_providers + WHERE upstream_oauth_provider_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let res = res + .map(UpstreamOAuthProvider::try_from) + .transpose() + .map_err(DatabaseError::from)?; + + Ok(res) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.add", + skip_all, + fields( + db.query.text, + upstream_oauth_provider.id, + 
upstream_oauth_provider.issuer = params.issuer, + upstream_oauth_provider.client_id = %params.client_id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: UpstreamOAuthProviderParams, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("upstream_oauth_provider.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO upstream_oauth_providers ( + upstream_oauth_provider_id, + issuer, + human_name, + brand_name, + scope, + token_endpoint_auth_method, + token_endpoint_signing_alg, + id_token_signed_response_alg, + fetch_userinfo, + userinfo_signed_response_alg, + client_id, + encrypted_client_secret, + claims_imports, + authorization_endpoint_override, + token_endpoint_override, + userinfo_endpoint_override, + jwks_uri_override, + discovery_mode, + pkce_mode, + response_mode, + forward_login_hint, + on_backchannel_logout, + created_at + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, + $12, $13, $14, $15, $16, $17, $18, $19, $20, + $21, $22, $23) + "#, + Uuid::from(id), + params.issuer.as_deref(), + params.human_name.as_deref(), + params.brand_name.as_deref(), + params.scope.to_string(), + params.token_endpoint_auth_method.to_string(), + params + .token_endpoint_signing_alg + .as_ref() + .map(ToString::to_string), + params.id_token_signed_response_alg.to_string(), + params.fetch_userinfo, + params + .userinfo_signed_response_alg + .as_ref() + .map(ToString::to_string), + ¶ms.client_id, + params.encrypted_client_secret.as_deref(), + Json(¶ms.claims_imports) as _, + params + .authorization_endpoint_override + .as_ref() + .map(ToString::to_string), + params + .token_endpoint_override + .as_ref() + .map(ToString::to_string), + params + .userinfo_endpoint_override + .as_ref() + .map(ToString::to_string), + params.jwks_uri_override.as_ref().map(ToString::to_string), + 
params.discovery_mode.as_str(), + params.pkce_mode.as_str(), + params.response_mode.as_ref().map(ToString::to_string), + params.forward_login_hint, + params.on_backchannel_logout.as_str(), + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UpstreamOAuthProvider { + id, + issuer: params.issuer, + human_name: params.human_name, + brand_name: params.brand_name, + scope: params.scope, + client_id: params.client_id, + encrypted_client_secret: params.encrypted_client_secret, + token_endpoint_signing_alg: params.token_endpoint_signing_alg, + token_endpoint_auth_method: params.token_endpoint_auth_method, + id_token_signed_response_alg: params.id_token_signed_response_alg, + fetch_userinfo: params.fetch_userinfo, + userinfo_signed_response_alg: params.userinfo_signed_response_alg, + created_at, + disabled_at: None, + claims_imports: params.claims_imports, + authorization_endpoint_override: params.authorization_endpoint_override, + token_endpoint_override: params.token_endpoint_override, + userinfo_endpoint_override: params.userinfo_endpoint_override, + jwks_uri_override: params.jwks_uri_override, + discovery_mode: params.discovery_mode, + pkce_mode: params.pkce_mode, + response_mode: params.response_mode, + additional_authorization_parameters: params.additional_authorization_parameters, + on_backchannel_logout: params.on_backchannel_logout, + forward_login_hint: params.forward_login_hint, + }) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.delete_by_id", + skip_all, + fields( + db.query.text, + upstream_oauth_provider.id = %id, + ), + err, + )] + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error> { + // Delete the authorization sessions first, as they have a foreign key + // constraint on the links and the providers. 
+ { + let span = info_span!( + "db.oauth2_client.delete_by_id.authorization_sessions", + upstream_oauth_provider.id = %id, + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + sqlx::query!( + r#" + DELETE FROM upstream_oauth_authorization_sessions + WHERE upstream_oauth_provider_id = $1 + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + // Delete the links next, as they have a foreign key constraint on the + // providers. + { + let span = info_span!( + "db.oauth2_client.delete_by_id.links", + upstream_oauth_provider.id = %id, + { DB_QUERY_TEXT } = tracing::field::Empty, + ); + sqlx::query!( + r#" + DELETE FROM upstream_oauth_links + WHERE upstream_oauth_provider_id = $1 + "#, + Uuid::from(id), + ) + .record(&span) + .execute(&mut *self.conn) + .instrument(span) + .await?; + } + + let res = sqlx::query!( + r#" + DELETE FROM upstream_oauth_providers + WHERE upstream_oauth_provider_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.add", + skip_all, + fields( + db.query.text, + upstream_oauth_provider.id = %id, + upstream_oauth_provider.issuer = params.issuer, + upstream_oauth_provider.client_id = %params.client_id, + ), + err, + )] + async fn upsert( + &mut self, + clock: &dyn Clock, + id: Ulid, + params: UpstreamOAuthProviderParams, + ) -> Result { + let created_at = clock.now(); + + let created_at = sqlx::query_scalar!( + r#" + INSERT INTO upstream_oauth_providers ( + upstream_oauth_provider_id, + issuer, + human_name, + brand_name, + scope, + token_endpoint_auth_method, + token_endpoint_signing_alg, + id_token_signed_response_alg, + fetch_userinfo, + userinfo_signed_response_alg, + client_id, + encrypted_client_secret, + claims_imports, + authorization_endpoint_override, + token_endpoint_override, + userinfo_endpoint_override, + jwks_uri_override, + 
discovery_mode, + pkce_mode, + response_mode, + additional_parameters, + forward_login_hint, + ui_order, + on_backchannel_logout, + created_at + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, + $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, + $21, $22, $23, $24, $25) + ON CONFLICT (upstream_oauth_provider_id) + DO UPDATE + SET + issuer = EXCLUDED.issuer, + human_name = EXCLUDED.human_name, + brand_name = EXCLUDED.brand_name, + scope = EXCLUDED.scope, + token_endpoint_auth_method = EXCLUDED.token_endpoint_auth_method, + token_endpoint_signing_alg = EXCLUDED.token_endpoint_signing_alg, + id_token_signed_response_alg = EXCLUDED.id_token_signed_response_alg, + fetch_userinfo = EXCLUDED.fetch_userinfo, + userinfo_signed_response_alg = EXCLUDED.userinfo_signed_response_alg, + disabled_at = NULL, + client_id = EXCLUDED.client_id, + encrypted_client_secret = EXCLUDED.encrypted_client_secret, + claims_imports = EXCLUDED.claims_imports, + authorization_endpoint_override = EXCLUDED.authorization_endpoint_override, + token_endpoint_override = EXCLUDED.token_endpoint_override, + userinfo_endpoint_override = EXCLUDED.userinfo_endpoint_override, + jwks_uri_override = EXCLUDED.jwks_uri_override, + discovery_mode = EXCLUDED.discovery_mode, + pkce_mode = EXCLUDED.pkce_mode, + response_mode = EXCLUDED.response_mode, + additional_parameters = EXCLUDED.additional_parameters, + forward_login_hint = EXCLUDED.forward_login_hint, + ui_order = EXCLUDED.ui_order, + on_backchannel_logout = EXCLUDED.on_backchannel_logout + RETURNING created_at + "#, + Uuid::from(id), + params.issuer.as_deref(), + params.human_name.as_deref(), + params.brand_name.as_deref(), + params.scope.to_string(), + params.token_endpoint_auth_method.to_string(), + params + .token_endpoint_signing_alg + .as_ref() + .map(ToString::to_string), + params.id_token_signed_response_alg.to_string(), + params.fetch_userinfo, + params + .userinfo_signed_response_alg + .as_ref() + .map(ToString::to_string), + ¶ms.client_id, + 
params.encrypted_client_secret.as_deref(), + Json(¶ms.claims_imports) as _, + params + .authorization_endpoint_override + .as_ref() + .map(ToString::to_string), + params + .token_endpoint_override + .as_ref() + .map(ToString::to_string), + params + .userinfo_endpoint_override + .as_ref() + .map(ToString::to_string), + params.jwks_uri_override.as_ref().map(ToString::to_string), + params.discovery_mode.as_str(), + params.pkce_mode.as_str(), + params.response_mode.as_ref().map(ToString::to_string), + Json(¶ms.additional_authorization_parameters) as _, + params.forward_login_hint, + params.ui_order, + params.on_backchannel_logout.as_str(), + created_at, + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(UpstreamOAuthProvider { + id, + issuer: params.issuer, + human_name: params.human_name, + brand_name: params.brand_name, + scope: params.scope, + client_id: params.client_id, + encrypted_client_secret: params.encrypted_client_secret, + token_endpoint_signing_alg: params.token_endpoint_signing_alg, + token_endpoint_auth_method: params.token_endpoint_auth_method, + id_token_signed_response_alg: params.id_token_signed_response_alg, + fetch_userinfo: params.fetch_userinfo, + userinfo_signed_response_alg: params.userinfo_signed_response_alg, + created_at, + disabled_at: None, + claims_imports: params.claims_imports, + authorization_endpoint_override: params.authorization_endpoint_override, + token_endpoint_override: params.token_endpoint_override, + userinfo_endpoint_override: params.userinfo_endpoint_override, + jwks_uri_override: params.jwks_uri_override, + discovery_mode: params.discovery_mode, + pkce_mode: params.pkce_mode, + response_mode: params.response_mode, + additional_authorization_parameters: params.additional_authorization_parameters, + forward_login_hint: params.forward_login_hint, + on_backchannel_logout: params.on_backchannel_logout, + }) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.disable", + skip_all, + fields( + 
db.query.text, + %upstream_oauth_provider.id, + ), + err, + )] + async fn disable( + &mut self, + clock: &dyn Clock, + mut upstream_oauth_provider: UpstreamOAuthProvider, + ) -> Result { + let disabled_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE upstream_oauth_providers + SET disabled_at = $2 + WHERE upstream_oauth_provider_id = $1 + "#, + Uuid::from(upstream_oauth_provider.id), + disabled_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + upstream_oauth_provider.disabled_at = Some(disabled_at); + + Ok(upstream_oauth_provider) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UpstreamOAuthProviderFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UpstreamOAuthProviderId, + )), + ProviderLookupIden::UpstreamOauthProviderId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::Issuer, + )), + ProviderLookupIden::Issuer, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::HumanName, + )), + ProviderLookupIden::HumanName, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::BrandName, + )), + ProviderLookupIden::BrandName, + ) + .expr_as( + Expr::col((UpstreamOAuthProviders::Table, UpstreamOAuthProviders::Scope)), + ProviderLookupIden::Scope, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::ClientId, + )), + ProviderLookupIden::ClientId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::EncryptedClientSecret, + )), + ProviderLookupIden::EncryptedClientSecret, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + 
UpstreamOAuthProviders::TokenEndpointSigningAlg, + )), + ProviderLookupIden::TokenEndpointSigningAlg, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::TokenEndpointAuthMethod, + )), + ProviderLookupIden::TokenEndpointAuthMethod, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::IdTokenSignedResponseAlg, + )), + ProviderLookupIden::IdTokenSignedResponseAlg, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::FetchUserinfo, + )), + ProviderLookupIden::FetchUserinfo, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UserinfoSignedResponseAlg, + )), + ProviderLookupIden::UserinfoSignedResponseAlg, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::CreatedAt, + )), + ProviderLookupIden::CreatedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::DisabledAt, + )), + ProviderLookupIden::DisabledAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::ClaimsImports, + )), + ProviderLookupIden::ClaimsImports, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::JwksUriOverride, + )), + ProviderLookupIden::JwksUriOverride, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::TokenEndpointOverride, + )), + ProviderLookupIden::TokenEndpointOverride, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::AuthorizationEndpointOverride, + )), + ProviderLookupIden::AuthorizationEndpointOverride, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UserinfoEndpointOverride, + )), + ProviderLookupIden::UserinfoEndpointOverride, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::DiscoveryMode, + )), + ProviderLookupIden::DiscoveryMode, + ) + 
.expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::PkceMode, + )), + ProviderLookupIden::PkceMode, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::ResponseMode, + )), + ProviderLookupIden::ResponseMode, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::AdditionalParameters, + )), + ProviderLookupIden::AdditionalParameters, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::ForwardLoginHint, + )), + ProviderLookupIden::ForwardLoginHint, + ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::OnBackchannelLogout, + )), + ProviderLookupIden::OnBackchannelLogout, + ) + .from(UpstreamOAuthProviders::Table) + .apply_filter(filter) + .generate_pagination( + ( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UpstreamOAuthProviderId, + ), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination + .process(edges) + .try_map(UpstreamOAuthProvider::try_from)?; + + return Ok(page); + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count( + &mut self, + filter: UpstreamOAuthProviderFilter<'_>, + ) -> Result { + let (sql, arguments) = Query::select() + .expr( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::UpstreamOAuthProviderId, + )) + .count(), + ) + .from(UpstreamOAuthProviders::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.upstream_oauth_provider.all_enabled", + skip_all, + fields( + 
db.query.text, + ), + err, + )] + async fn all_enabled(&mut self) -> Result, Self::Error> { + let res = sqlx::query_as!( + ProviderLookup, + r#" + SELECT + upstream_oauth_provider_id, + issuer, + human_name, + brand_name, + scope, + client_id, + encrypted_client_secret, + token_endpoint_signing_alg, + token_endpoint_auth_method, + id_token_signed_response_alg, + fetch_userinfo, + userinfo_signed_response_alg, + created_at, + disabled_at, + claims_imports as "claims_imports: Json", + jwks_uri_override, + authorization_endpoint_override, + token_endpoint_override, + userinfo_endpoint_override, + discovery_mode, + pkce_mode, + response_mode, + additional_parameters as "additional_parameters: Json>", + forward_login_hint, + on_backchannel_logout + FROM upstream_oauth_providers + WHERE disabled_at IS NULL + ORDER BY ui_order ASC, upstream_oauth_provider_id ASC + "#, + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let res: Result, _> = res.into_iter().map(TryInto::try_into).collect(); + Ok(res?) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/session.rs b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/session.rs new file mode 100644 index 00000000..8d7e1855 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/upstream_oauth2/session.rs @@ -0,0 +1,621 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + BrowserSession, Clock, UpstreamOAuthAuthorizationSession, + UpstreamOAuthAuthorizationSessionState, UpstreamOAuthLink, UpstreamOAuthProvider, +}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + upstream_oauth2::{UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository}, +}; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def, extension::postgres::PgExpr}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt}, + iden::UpstreamOAuthAuthorizationSessions, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +impl Filter for UpstreamOAuthSessionFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.provider().map(|provider| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, + )) + .eq(Uuid::from(provider.id)) + })) + .add_option(self.sub_claim().map(|sub| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )) + .cast_json_field("sub") + .eq(sub) + })) + .add_option(self.sid_claim().map(|sid| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )) + .cast_json_field("sid") + .eq(sid) + })) + } +} + +/// An implementation of [`UpstreamOAuthSessionRepository`] for a PostgreSQL +/// connection +pub struct PgUpstreamOAuthSessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUpstreamOAuthSessionRepository<'c> { + /// Create a new [`PgUpstreamOAuthSessionRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + 
+#[derive(sqlx::FromRow)] +#[enum_def] +struct SessionLookup { + upstream_oauth_authorization_session_id: Uuid, + upstream_oauth_provider_id: Uuid, + upstream_oauth_link_id: Option, + state: String, + code_challenge_verifier: Option, + nonce: Option, + id_token: Option, + id_token_claims: Option, + userinfo: Option, + created_at: DateTime, + completed_at: Option>, + consumed_at: Option>, + extra_callback_parameters: Option, + unlinked_at: Option>, +} + +impl Node for SessionLookup { + fn cursor(&self) -> Ulid { + self.upstream_oauth_authorization_session_id.into() + } +} + +impl TryFrom for UpstreamOAuthAuthorizationSession { + type Error = DatabaseInconsistencyError; + + fn try_from(value: SessionLookup) -> Result { + let id = value.upstream_oauth_authorization_session_id.into(); + let state = match ( + value.upstream_oauth_link_id, + value.id_token, + value.id_token_claims, + value.extra_callback_parameters, + value.userinfo, + value.completed_at, + value.consumed_at, + value.unlinked_at, + ) { + (None, None, None, None, None, None, None, None) => { + UpstreamOAuthAuthorizationSessionState::Pending + } + ( + Some(link_id), + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + Some(completed_at), + None, + None, + ) => UpstreamOAuthAuthorizationSessionState::Completed { + completed_at, + link_id: link_id.into(), + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + }, + ( + Some(link_id), + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + Some(completed_at), + Some(consumed_at), + None, + ) => UpstreamOAuthAuthorizationSessionState::Consumed { + completed_at, + link_id: link_id.into(), + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + consumed_at, + }, + ( + _, + id_token, + id_token_claims, + _, + _, + Some(completed_at), + consumed_at, + Some(unlinked_at), + ) => UpstreamOAuthAuthorizationSessionState::Unlinked { + completed_at, + id_token, + id_token_claims, + consumed_at, + 
unlinked_at, + }, + _ => { + return Err(DatabaseInconsistencyError::on( + "upstream_oauth_authorization_sessions", + ) + .row(id)); + } + }; + + Ok(Self { + id, + provider_id: value.upstream_oauth_provider_id.into(), + state_str: value.state, + nonce: value.nonce, + code_challenge_verifier: value.code_challenge_verifier, + created_at: value.created_at, + state, + }) + } +} + +#[async_trait] +impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.lookup", + skip_all, + fields( + db.query.text, + upstream_oauth_provider.id = %id, + ), + err, + )] + async fn lookup( + &mut self, + id: Ulid, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + SessionLookup, + r#" + SELECT + upstream_oauth_authorization_session_id, + upstream_oauth_provider_id, + upstream_oauth_link_id, + state, + code_challenge_verifier, + nonce, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + created_at, + completed_at, + consumed_at, + unlinked_at + FROM upstream_oauth_authorization_sessions + WHERE upstream_oauth_authorization_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.add", + skip_all, + fields( + db.query.text, + %upstream_oauth_provider.id, + upstream_oauth_provider.issuer = upstream_oauth_provider.issuer, + %upstream_oauth_provider.client_id, + upstream_oauth_authorization_session.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + state_str: String, + code_challenge_verifier: Option, + nonce: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), 
rng); + tracing::Span::current().record( + "upstream_oauth_authorization_session.id", + tracing::field::display(id), + ); + + sqlx::query!( + r#" + INSERT INTO upstream_oauth_authorization_sessions ( + upstream_oauth_authorization_session_id, + upstream_oauth_provider_id, + state, + code_challenge_verifier, + nonce, + created_at, + completed_at, + consumed_at, + id_token, + userinfo + ) VALUES ($1, $2, $3, $4, $5, $6, NULL, NULL, NULL, NULL) + "#, + Uuid::from(id), + Uuid::from(upstream_oauth_provider.id), + &state_str, + code_challenge_verifier.as_deref(), + nonce, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UpstreamOAuthAuthorizationSession { + id, + state: UpstreamOAuthAuthorizationSessionState::default(), + provider_id: upstream_oauth_provider.id, + state_str, + code_challenge_verifier, + nonce, + created_at, + }) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.complete_with_link", + skip_all, + fields( + db.query.text, + %upstream_oauth_authorization_session.id, + %upstream_oauth_link.id, + ), + err, + )] + async fn complete_with_link( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + upstream_oauth_link: &UpstreamOAuthLink, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + ) -> Result { + let completed_at = clock.now(); + + sqlx::query!( + r#" + UPDATE upstream_oauth_authorization_sessions + SET upstream_oauth_link_id = $1 + , completed_at = $2 + , id_token = $3 + , id_token_claims = $4 + , extra_callback_parameters = $5 + , userinfo = $6 + WHERE upstream_oauth_authorization_session_id = $7 + "#, + Uuid::from(upstream_oauth_link.id), + completed_at, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + Uuid::from(upstream_oauth_authorization_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let upstream_oauth_authorization_session = 
upstream_oauth_authorization_session + .complete( + completed_at, + upstream_oauth_link, + id_token, + id_token_claims, + extra_callback_parameters, + userinfo, + ) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(upstream_oauth_authorization_session) + } + + /// Mark a session as consumed + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.consume", + skip_all, + fields( + db.query.text, + %upstream_oauth_authorization_session.id, + ), + err, + )] + async fn consume( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + browser_session: &BrowserSession, + ) -> Result { + let consumed_at = clock.now(); + sqlx::query!( + r#" + UPDATE upstream_oauth_authorization_sessions + SET consumed_at = $1, + user_session_id = $2 + WHERE upstream_oauth_authorization_session_id = $3 + "#, + consumed_at, + Uuid::from(browser_session.id), + Uuid::from(upstream_oauth_authorization_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let upstream_oauth_authorization_session = upstream_oauth_authorization_session + .consume(consumed_at) + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(upstream_oauth_authorization_session) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + )), + SessionLookupIden::UpstreamOauthAuthorizationSessionId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, + )), + SessionLookupIden::UpstreamOauthProviderId, + ) + .expr_as( + Expr::col(( + 
UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthLinkId, + )), + SessionLookupIden::UpstreamOauthLinkId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::State, + )), + SessionLookupIden::State, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CodeChallengeVerifier, + )), + SessionLookupIden::CodeChallengeVerifier, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::Nonce, + )), + SessionLookupIden::Nonce, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdToken, + )), + SessionLookupIden::IdToken, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )), + SessionLookupIden::IdTokenClaims, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::ExtraCallbackParameters, + )), + SessionLookupIden::ExtraCallbackParameters, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::Userinfo, + )), + SessionLookupIden::Userinfo, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CreatedAt, + )), + SessionLookupIden::CreatedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CompletedAt, + )), + SessionLookupIden::CompletedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::ConsumedAt, + )), + SessionLookupIden::ConsumedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UnlinkedAt, + )), + SessionLookupIden::UnlinkedAt, + ) + 
.from(UpstreamOAuthAuthorizationSessions::Table) + .apply_filter(filter) + .generate_pagination( + ( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + ), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination + .process(edges) + .try_map(UpstreamOAuthAuthorizationSession::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + ) -> Result { + let (sql, arguments) = Query::select() + .expr( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + )) + .count(), + ) + .from(UpstreamOAuthAuthorizationSessions::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.cleanup", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // Use ULID cursor-based pagination for pending sessions only. + // We only delete sessions that are not yet completed. + // `MAX(uuid)` isn't a thing in Postgres, so we aggregate on the client side. 
+ let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT upstream_oauth_authorization_session_id + FROM upstream_oauth_authorization_sessions + WHERE ($1::uuid IS NULL OR upstream_oauth_authorization_session_id > $1) + AND upstream_oauth_authorization_session_id <= $2 + AND user_session_id IS NULL + ORDER BY upstream_oauth_authorization_session_id + LIMIT $3 + ) + DELETE FROM upstream_oauth_authorization_sessions + USING to_delete + WHERE upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id = to_delete.upstream_oauth_authorization_session_id + RETURNING upstream_oauth_authorization_sessions.upstream_oauth_authorization_session_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/email.rs b/matrix-authentication-service/crates/storage-pg/src/user/email.rs new file mode 100644 index 00000000..50180705 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/email.rs @@ -0,0 +1,817 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + BrowserSession, Clock, UpstreamOAuthAuthorizationSession, User, UserEmail, + UserEmailAuthentication, UserEmailAuthenticationCode, UserRegistration, +}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + user::{UserEmailFilter, UserEmailRepository}, +}; +use rand::RngCore; +use sea_query::{Expr, Func, PostgresQueryBuilder, Query, SimpleExpr, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, + filter::{Filter, StatementExt}, + iden::UserEmails, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`UserEmailRepository`] for a PostgreSQL connection +pub struct PgUserEmailRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserEmailRepository<'c> { + /// Create a new [`PgUserEmailRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +#[enum_def] +struct UserEmailLookup { + user_email_id: Uuid, + user_id: Uuid, + email: String, + created_at: DateTime, +} + +impl Node for UserEmailLookup { + fn cursor(&self) -> Ulid { + self.user_email_id.into() + } +} + +impl From for UserEmail { + fn from(e: UserEmailLookup) -> UserEmail { + UserEmail { + id: e.user_email_id.into(), + user_id: e.user_id.into(), + email: e.email, + created_at: e.created_at, + } + } +} + +struct UserEmailAuthenticationLookup { + user_email_authentication_id: Uuid, + user_session_id: Option, + user_registration_id: Option, + email: String, + created_at: DateTime, + completed_at: Option>, +} + +impl From for UserEmailAuthentication { + fn from(value: UserEmailAuthenticationLookup) -> Self { + UserEmailAuthentication { + id: value.user_email_authentication_id.into(), + user_session_id: value.user_session_id.map(Ulid::from), + user_registration_id: 
value.user_registration_id.map(Ulid::from), + email: value.email, + created_at: value.created_at, + completed_at: value.completed_at, + } + } +} + +struct UserEmailAuthenticationCodeLookup { + user_email_authentication_code_id: Uuid, + user_email_authentication_id: Uuid, + code: String, + created_at: DateTime, + expires_at: DateTime, +} + +impl From for UserEmailAuthenticationCode { + fn from(value: UserEmailAuthenticationCodeLookup) -> Self { + UserEmailAuthenticationCode { + id: value.user_email_authentication_code_id.into(), + user_email_authentication_id: value.user_email_authentication_id.into(), + code: value.code, + created_at: value.created_at, + expires_at: value.expires_at, + } + } +} + +impl Filter for UserEmailFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.user().map(|user| { + Expr::col((UserEmails::Table, UserEmails::UserId)).eq(Uuid::from(user.id)) + })) + .add_option(self.email().map(|email| { + SimpleExpr::from(Func::lower(Expr::col(( + UserEmails::Table, + UserEmails::Email, + )))) + .eq(Func::lower(email)) + })) + } +} + +#[async_trait] +impl UserEmailRepository for PgUserEmailRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_email.lookup", + skip_all, + fields( + db.query.text, + user_email.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailLookup, + r#" + SELECT user_email_id + , user_id + , email + , created_at + FROM user_emails + + WHERE user_email_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(user_email) = res else { + return Ok(None); + }; + + Ok(Some(user_email.into())) + } + + #[tracing::instrument( + name = "db.user_email.find", + skip_all, + fields( + db.query.text, + %user.id, + user_email.email = email, + ), + err, + )] + async fn find(&mut self, user: 
&User, email: &str) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailLookup, + r#" + SELECT user_email_id + , user_id + , email + , created_at + FROM user_emails + + WHERE user_id = $1 AND LOWER(email) = LOWER($2) + "#, + Uuid::from(user.id), + email, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(user_email) = res else { + return Ok(None); + }; + + Ok(Some(user_email.into())) + } + + #[tracing::instrument( + name = "db.user_email.find_by_email", + skip_all, + fields( + db.query.text, + user_email.email = email, + ), + err, + )] + async fn find_by_email(&mut self, email: &str) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailLookup, + r#" + SELECT user_email_id + , user_id + , email + , created_at + FROM user_emails + WHERE LOWER(email) = LOWER($1) + "#, + email, + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + if res.len() != 1 { + return Ok(None); + } + + let Some(user_email) = res.into_iter().next() else { + return Ok(None); + }; + + Ok(Some(user_email.into())) + } + + #[tracing::instrument( + name = "db.user_email.all", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn all(&mut self, user: &User) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailLookup, + r#" + SELECT user_email_id + , user_id + , email + , created_at + FROM user_emails + + WHERE user_id = $1 + + ORDER BY email ASC + "#, + Uuid::from(user.id), + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + Ok(res.into_iter().map(Into::into).collect()) + } + + #[tracing::instrument( + name = "db.user_email.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UserEmailFilter<'_>, + pagination: Pagination, + ) -> Result, DatabaseError> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col((UserEmails::Table, UserEmails::UserEmailId)), + UserEmailLookupIden::UserEmailId, + ) + .expr_as( + Expr::col((UserEmails::Table, 
UserEmails::UserId)), + UserEmailLookupIden::UserId, + ) + .expr_as( + Expr::col((UserEmails::Table, UserEmails::Email)), + UserEmailLookupIden::Email, + ) + .expr_as( + Expr::col((UserEmails::Table, UserEmails::CreatedAt)), + UserEmailLookupIden::CreatedAt, + ) + .from(UserEmails::Table) + .apply_filter(filter) + .generate_pagination((UserEmails::Table, UserEmails::UserEmailId), pagination) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).map(UserEmail::from); + + Ok(page) + } + + #[tracing::instrument( + name = "db.user_email.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: UserEmailFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr(Expr::col((UserEmails::Table, UserEmails::UserEmailId)).count()) + .from(UserEmails::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.user_email.add", + skip_all, + fields( + db.query.text, + %user.id, + user_email.id, + user_email.email = email, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + email: String, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_email.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_emails (user_email_id, user_id, email, created_at) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(user.id), + &email, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserEmail { + id, + user_id: user.id, + email, + created_at, + 
}) + } + + #[tracing::instrument( + name = "db.user_email.remove", + skip_all, + fields( + db.query.text, + user.id = %user_email.user_id, + %user_email.id, + %user_email.email, + ), + err, + )] + async fn remove(&mut self, user_email: UserEmail) -> Result<(), Self::Error> { + let res = sqlx::query!( + r#" + DELETE FROM user_emails + WHERE user_email_id = $1 + "#, + Uuid::from(user_email.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.user_email.remove_bulk", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn remove_bulk(&mut self, filter: UserEmailFilter<'_>) -> Result { + let (sql, arguments) = Query::delete() + .from_table(UserEmails::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let res = sqlx::query_with(&sql, arguments) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.user_email.add_authentication_for_session", + skip_all, + fields( + db.query.text, + %session.id, + user_email_authentication.id, + user_email_authentication.email = email, + ), + err, + )] + async fn add_authentication_for_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + session: &BrowserSession, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current() + .record("user_email_authentication.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_email_authentications + ( user_email_authentication_id + , user_session_id + , email + , created_at + ) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(session.id), + &email, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserEmailAuthentication { + id, + user_session_id: Some(session.id), + 
user_registration_id: None, + email, + created_at, + completed_at: None, + }) + } + + #[tracing::instrument( + name = "db.user_email.add_authentication_for_registration", + skip_all, + fields( + db.query.text, + %user_registration.id, + user_email_authentication.id, + user_email_authentication.email = email, + ), + err, + )] + async fn add_authentication_for_registration( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + user_registration: &UserRegistration, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current() + .record("user_email_authentication.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_email_authentications + ( user_email_authentication_id + , user_registration_id + , email + , created_at + ) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(user_registration.id), + &email, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserEmailAuthentication { + id, + user_session_id: None, + user_registration_id: Some(user_registration.id), + email, + created_at, + completed_at: None, + }) + } + + #[tracing::instrument( + name = "db.user_email.add_authentication_code", + skip_all, + fields( + db.query.text, + %user_email_authentication.id, + %user_email_authentication.email, + user_email_authentication_code.id, + user_email_authentication_code.code = code, + ), + err, + )] + async fn add_authentication_code( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + duration: chrono::Duration, + user_email_authentication: &UserEmailAuthentication, + code: String, + ) -> Result { + let created_at = clock.now(); + let expires_at = created_at + duration; + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record( + "user_email_authentication_code.id", + tracing::field::display(id), + ); + + sqlx::query!( + r#" + INSERT INTO 
user_email_authentication_codes + ( user_email_authentication_code_id + , user_email_authentication_id + , code + , created_at + , expires_at + ) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + Uuid::from(user_email_authentication.id), + &code, + created_at, + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserEmailAuthenticationCode { + id, + user_email_authentication_id: user_email_authentication.id, + code, + created_at, + expires_at, + }) + } + + #[tracing::instrument( + name = "db.user_email.lookup_authentication", + skip_all, + fields( + db.query.text, + user_email_authentication.id = %id, + ), + err, + )] + async fn lookup_authentication( + &mut self, + id: Ulid, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailAuthenticationLookup, + r#" + SELECT user_email_authentication_id + , user_session_id + , user_registration_id + , email + , created_at + , completed_at + FROM user_email_authentications + WHERE user_email_authentication_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + Ok(res.map(UserEmailAuthentication::from)) + } + + #[tracing::instrument( + name = "db.user_email.find_authentication_by_code", + skip_all, + fields( + db.query.text, + %authentication.id, + user_email_authentication_code.code = code, + ), + err, + )] + async fn find_authentication_code( + &mut self, + authentication: &UserEmailAuthentication, + code: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserEmailAuthenticationCodeLookup, + r#" + SELECT user_email_authentication_code_id + , user_email_authentication_id + , code + , created_at + , expires_at + FROM user_email_authentication_codes + WHERE user_email_authentication_id = $1 + AND code = $2 + "#, + Uuid::from(authentication.id), + code, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + Ok(res.map(UserEmailAuthenticationCode::from)) + } + + #[tracing::instrument( + name = 
"db.user_email.complete_email_authentication_with_code", + skip_all, + fields( + db.query.text, + %user_email_authentication.id, + %user_email_authentication.email, + %user_email_authentication_code.id, + %user_email_authentication_code.code, + ), + err, + )] + async fn complete_authentication_with_code( + &mut self, + clock: &dyn Clock, + mut user_email_authentication: UserEmailAuthentication, + user_email_authentication_code: &UserEmailAuthenticationCode, + ) -> Result { + // We technically don't use the authentication code here (other than + // recording it in the span), but this is to make sure the caller has + // fetched one before calling this + let completed_at = clock.now(); + + // We'll assume the caller has checked that completed_at is None, so in case + // they haven't, the update will not affect any rows, which will raise + // an error + let res = sqlx::query!( + r#" + UPDATE user_email_authentications + SET completed_at = $2 + WHERE user_email_authentication_id = $1 + AND completed_at IS NULL + "#, + Uuid::from(user_email_authentication.id), + completed_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_email_authentication.completed_at = Some(completed_at); + Ok(user_email_authentication) + } + + #[tracing::instrument( + name = "db.user_email.complete_email_authentication_with_upstream", + skip_all, + fields( + db.query.text, + %user_email_authentication.id, + %user_email_authentication.email, + %upstream_oauth_authorization_session.id, + ), + err, + )] + async fn complete_authentication_with_upstream( + &mut self, + clock: &dyn Clock, + mut user_email_authentication: UserEmailAuthentication, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result { + // We technically don't use the upstream_oauth_authorization_session here (other + // than recording it in the span), but this is to make sure the caller + // has fetched one before calling this + let 
completed_at = clock.now(); + + // We'll assume the caller has checked that completed_at is None, so in case + // they haven't, the update will not affect any rows, which will raise + // an error + let res = sqlx::query!( + r#" + UPDATE user_email_authentications + SET completed_at = $2 + WHERE user_email_authentication_id = $1 + AND completed_at IS NULL + "#, + Uuid::from(user_email_authentication.id), + completed_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_email_authentication.completed_at = Some(completed_at); + Ok(user_email_authentication) + } + + #[tracing::instrument( + name = "db.user_email.cleanup_authentications", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_authentications( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // Use ULID cursor-based pagination. Since ULIDs contain a timestamp, + // we can efficiently delete old authentications without needing an index. + // `MAX(uuid)` isn't a thing in Postgres, so we aggregate on the client side. 
+ let res = sqlx::query_scalar!( + r#" + WITH + to_delete AS ( + SELECT user_email_authentication_id + FROM user_email_authentications + WHERE ($1::uuid IS NULL OR user_email_authentication_id > $1) + AND user_email_authentication_id <= $2 + ORDER BY user_email_authentication_id + LIMIT $3 + ), + deleted_codes AS ( + DELETE FROM user_email_authentication_codes + USING to_delete + WHERE user_email_authentication_codes.user_email_authentication_id = to_delete.user_email_authentication_id + RETURNING user_email_authentication_codes.user_email_authentication_code_id + ) + DELETE FROM user_email_authentications + USING to_delete + WHERE user_email_authentications.user_email_authentication_id = to_delete.user_email_authentication_id + RETURNING user_email_authentications.user_email_authentication_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/mod.rs b/matrix-authentication-service/crates/storage-pg/src/user/mod.rs new file mode 100644 index 00000000..f63fce2c --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/mod.rs @@ -0,0 +1,612 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing the PostgreSQL implementation of the user-related +//! 
repositories + +use async_trait::async_trait; +use mas_data_model::{Clock, User}; +use mas_storage::user::{UserFilter, UserRepository}; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, extension::postgres::PgExpr as _}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, + filter::{Filter, StatementExt}, + iden::Users, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +mod email; +mod password; +mod recovery; +mod registration; +mod registration_token; +mod session; +mod terms; + +#[cfg(test)] +mod tests; + +pub use self::{ + email::PgUserEmailRepository, password::PgUserPasswordRepository, + recovery::PgUserRecoveryRepository, registration::PgUserRegistrationRepository, + registration_token::PgUserRegistrationTokenRepository, session::PgBrowserSessionRepository, + terms::PgUserTermsRepository, +}; + +/// An implementation of [`UserRepository`] for a PostgreSQL connection +pub struct PgUserRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserRepository<'c> { + /// Create a new [`PgUserRepository`] from an active PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +mod priv_ { + // The enum_def macro generates a public enum, which we don't want, because it + // triggers the missing docs warning + #![allow(missing_docs)] + + use chrono::{DateTime, Utc}; + use mas_storage::pagination::Node; + use sea_query::enum_def; + use ulid::Ulid; + use uuid::Uuid; + + #[derive(Debug, Clone, sqlx::FromRow)] + #[enum_def] + pub(super) struct UserLookup { + pub(super) user_id: Uuid, + pub(super) username: String, + pub(super) created_at: DateTime, + pub(super) locked_at: Option>, + pub(super) deactivated_at: Option>, + pub(super) can_request_admin: bool, + pub(super) is_guest: bool, + } + + impl Node for UserLookup { + fn cursor(&self) -> Ulid { + self.user_id.into() + } + } +} + +use priv_::{UserLookup, 
UserLookupIden}; + +impl From for User { + fn from(value: UserLookup) -> Self { + let id = value.user_id.into(); + Self { + id, + username: value.username, + sub: id.to_string(), + created_at: value.created_at, + locked_at: value.locked_at, + deactivated_at: value.deactivated_at, + can_request_admin: value.can_request_admin, + is_guest: value.is_guest, + } + } +} + +impl Filter for UserFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.state().map(|state| { + match state { + mas_storage::user::UserState::Deactivated => { + Expr::col((Users::Table, Users::DeactivatedAt)).is_not_null() + } + mas_storage::user::UserState::Locked => { + Expr::col((Users::Table, Users::LockedAt)).is_not_null() + } + mas_storage::user::UserState::Active => { + Expr::col((Users::Table, Users::LockedAt)) + .is_null() + .and(Expr::col((Users::Table, Users::DeactivatedAt)).is_null()) + } + } + })) + .add_option(self.can_request_admin().map(|can_request_admin| { + Expr::col((Users::Table, Users::CanRequestAdmin)).eq(can_request_admin) + })) + .add_option( + self.is_guest() + .map(|is_guest| Expr::col((Users::Table, Users::IsGuest)).eq(is_guest)), + ) + .add_option(self.search().map(|search| { + Expr::col((Users::Table, Users::Username)).ilike(format!("%{search}%")) + })) + } +} + +#[async_trait] +impl UserRepository for PgUserRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user.lookup", + skip_all, + fields( + db.query.text, + user.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserLookup, + r#" + SELECT user_id + , username + , created_at + , locked_at + , deactivated_at + , can_request_admin + , is_guest + FROM users + WHERE user_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + 
Ok(Some(res.into())) + } + + #[tracing::instrument( + name = "db.user.find_by_username", + skip_all, + fields( + db.query.text, + user.username = username, + ), + err, + )] + async fn find_by_username(&mut self, username: &str) -> Result, Self::Error> { + // We may have multiple users with the same username, but with a different + // casing. In this case, we want to return the one which matches the exact + // casing + let res = sqlx::query_as!( + UserLookup, + r#" + SELECT user_id + , username + , created_at + , locked_at + , deactivated_at + , can_request_admin + , is_guest + FROM users + WHERE LOWER(username) = LOWER($1) + "#, + username, + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + match &res[..] { + // Happy path: there is only one user matching the username… + [user] => Ok(Some(user.clone().into())), + // …or none. + [] => Ok(None), + list => { + // If there are multiple users with the same username, we want to + // return the one which matches the exact casing + if let Some(user) = list.iter().find(|user| user.username == username) { + Ok(Some(user.clone().into())) + } else { + // If none match exactly, we prefer to return nothing + Ok(None) + } + } + } + } + + #[tracing::instrument( + name = "db.user.add", + skip_all, + fields( + db.query.text, + user.username = username, + user.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: String, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user.id", tracing::field::display(id)); + + let res = sqlx::query!( + r#" + INSERT INTO users (user_id, username, created_at) + VALUES ($1, $2, $3) + ON CONFLICT (username) DO NOTHING + "#, + Uuid::from(id), + username, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + // If the user already exists, want to return an error but not poison the + // transaction + 
DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(User { + id, + username, + sub: id.to_string(), + created_at, + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }) + } + + #[tracing::instrument( + name = "db.user.exists", + skip_all, + fields( + db.query.text, + user.username = username, + ), + err, + )] + async fn exists(&mut self, username: &str) -> Result { + let exists = sqlx::query_scalar!( + r#" + SELECT EXISTS( + SELECT 1 FROM users WHERE LOWER(username) = LOWER($1) + ) AS "exists!" + "#, + username + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(exists) + } + + #[tracing::instrument( + name = "db.user.lock", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn lock(&mut self, clock: &dyn Clock, mut user: User) -> Result { + if user.locked_at.is_some() { + return Ok(user); + } + + let locked_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE users + SET locked_at = $1 + WHERE user_id = $2 + "#, + locked_at, + Uuid::from(user.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user.locked_at = Some(locked_at); + + Ok(user) + } + + #[tracing::instrument( + name = "db.user.unlock", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn unlock(&mut self, mut user: User) -> Result { + if user.locked_at.is_none() { + return Ok(user); + } + + let res = sqlx::query!( + r#" + UPDATE users + SET locked_at = NULL + WHERE user_id = $1 + "#, + Uuid::from(user.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user.locked_at = None; + + Ok(user) + } + + #[tracing::instrument( + name = "db.user.deactivate", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn deactivate(&mut self, clock: &dyn Clock, mut user: User) -> Result { + if user.deactivated_at.is_some() { + return Ok(user); + } + + let 
deactivated_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE users + SET deactivated_at = $2 + WHERE user_id = $1 + AND deactivated_at IS NULL + "#, + Uuid::from(user.id), + deactivated_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user.deactivated_at = Some(deactivated_at); + + Ok(user) + } + + #[tracing::instrument( + name = "db.user.reactivate", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn reactivate(&mut self, mut user: User) -> Result { + if user.deactivated_at.is_none() { + return Ok(user); + } + + let res = sqlx::query!( + r#" + UPDATE users + SET deactivated_at = NULL + WHERE user_id = $1 + "#, + Uuid::from(user.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user.deactivated_at = None; + + Ok(user) + } + + #[tracing::instrument( + name = "db.user.delete_unsupported_threepids", + skip_all, + fields( + db.query.text, + %user.id, + ), + err, + )] + async fn delete_unsupported_threepids(&mut self, user: &User) -> Result { + let res = sqlx::query!( + r#" + DELETE FROM user_unsupported_third_party_ids + WHERE user_id = $1 + "#, + Uuid::from(user.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.user.set_can_request_admin", + skip_all, + fields( + db.query.text, + %user.id, + user.can_request_admin = can_request_admin, + ), + err, + )] + async fn set_can_request_admin( + &mut self, + mut user: User, + can_request_admin: bool, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE users + SET can_request_admin = $2 + WHERE user_id = $1 + "#, + Uuid::from(user.id), + can_request_admin, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user.can_request_admin = can_request_admin; + + Ok(user) + } + + 
#[tracing::instrument( + name = "db.user.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UserFilter<'_>, + pagination: mas_storage::Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col((Users::Table, Users::UserId)), + UserLookupIden::UserId, + ) + .expr_as( + Expr::col((Users::Table, Users::Username)), + UserLookupIden::Username, + ) + .expr_as( + Expr::col((Users::Table, Users::CreatedAt)), + UserLookupIden::CreatedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::LockedAt)), + UserLookupIden::LockedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::DeactivatedAt)), + UserLookupIden::DeactivatedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::CanRequestAdmin)), + UserLookupIden::CanRequestAdmin, + ) + .expr_as( + Expr::col((Users::Table, Users::IsGuest)), + UserLookupIden::IsGuest, + ) + .from(Users::Table) + .apply_filter(filter) + .generate_pagination((Users::Table, Users::UserId), pagination) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination.process(edges).map(User::from); + + Ok(page) + } + + #[tracing::instrument( + name = "db.user.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: UserFilter<'_>) -> Result { + let (sql, arguments) = Query::select() + .expr(Expr::col((Users::Table, Users::UserId)).count()) + .from(Users::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.user.acquire_lock_for_sync", + skip_all, + fields( + db.query.text, + user.id = %user.id, + ), + err, + )] + async fn acquire_lock_for_sync(&mut self, user: 
&User) -> Result<(), Self::Error> {
        // XXX: this lock isn't strictly scoped to users, but as we don't use many
        // postgres advisory locks, it's fine for now. Later on, we could use row-level
        // locks to make sure we don't get into trouble

        // Convert the user ID to a u128 and grab the lower 64 bits
        // As this includes 64bit of the random part of the ULID, it should be random
        // enough to not collide
        let lock_id = (u128::from(user.id) & 0xffff_ffff_ffff_ffff) as i64;

        // Use a PG advisory lock, which will be released when the transaction is
        // committed or rolled back
        sqlx::query!(
            r#"
                SELECT pg_advisory_xact_lock($1)
            "#,
            lock_id,
        )
        .traced()
        .execute(&mut *self.conn)
        .await?;

        Ok(())
    }
}
diff --git a/matrix-authentication-service/crates/storage-pg/src/user/password.rs b/matrix-authentication-service/crates/storage-pg/src/user/password.rs
new file mode 100644
index 00000000..ca616127
--- /dev/null
+++ b/matrix-authentication-service/crates/storage-pg/src/user/password.rs
@@ -0,0 +1,149 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+ +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{Clock, Password, User}; +use mas_storage::user::UserPasswordRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, tracing::ExecuteExt}; + +/// An implementation of [`UserPasswordRepository`] for a PostgreSQL connection +pub struct PgUserPasswordRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserPasswordRepository<'c> { + /// Create a new [`PgUserPasswordRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct UserPasswordLookup { + user_password_id: Uuid, + hashed_password: String, + version: i32, + upgraded_from_id: Option, + created_at: DateTime, +} + +#[async_trait] +impl UserPasswordRepository for PgUserPasswordRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_password.active", + skip_all, + fields( + db.query.text, + %user.id, + %user.username, + ), + err, + )] + async fn active(&mut self, user: &User) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserPasswordLookup, + r#" + SELECT up.user_password_id + , up.hashed_password + , up.version + , up.upgraded_from_id + , up.created_at + FROM user_passwords up + WHERE up.user_id = $1 + ORDER BY up.created_at DESC + LIMIT 1 + "#, + Uuid::from(user.id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + let id = Ulid::from(res.user_password_id); + + let version = res.version.try_into().map_err(|e| { + DatabaseInconsistencyError::on("user_passwords") + .column("version") + .row(id) + .source(e) + })?; + + let upgraded_from_id = res.upgraded_from_id.map(Ulid::from); + let created_at = res.created_at; + let hashed_password = res.hashed_password; + + Ok(Some(Password { + id, + hashed_password, + version, + upgraded_from_id, + 
created_at, + })) + } + + #[tracing::instrument( + name = "db.user_password.add", + skip_all, + fields( + db.query.text, + %user.id, + %user.username, + user_password.id, + user_password.version = version, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + version: u16, + hashed_password: String, + upgraded_from: Option<&Password>, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_password.id", tracing::field::display(id)); + + let upgraded_from_id = upgraded_from.map(|p| p.id); + + sqlx::query!( + r#" + INSERT INTO user_passwords + (user_password_id, user_id, hashed_password, version, upgraded_from_id, created_at) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + Uuid::from(id), + Uuid::from(user.id), + hashed_password, + i32::from(version), + upgraded_from_id.map(Uuid::from), + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(Password { + id, + hashed_password, + version, + upgraded_from_id, + created_at, + }) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/recovery.rs b/matrix-authentication-service/crates/storage-pg/src/user/recovery.rs new file mode 100644 index 00000000..800cdcb7 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/recovery.rs @@ -0,0 +1,379 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::{Clock, UserEmail, UserRecoverySession, UserRecoveryTicket}; +use mas_storage::user::UserRecoveryRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{DatabaseError, ExecuteExt}; + +/// An implementation of [`UserRecoveryRepository`] for a PostgreSQL connection +pub struct PgUserRecoveryRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserRecoveryRepository<'c> { + /// Create a new [`PgUserRecoveryRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct UserRecoverySessionRow { + user_recovery_session_id: Uuid, + email: String, + user_agent: String, + ip_address: Option, + locale: String, + created_at: DateTime, + consumed_at: Option>, +} + +impl From for UserRecoverySession { + fn from(row: UserRecoverySessionRow) -> Self { + UserRecoverySession { + id: row.user_recovery_session_id.into(), + email: row.email, + user_agent: row.user_agent, + ip_address: row.ip_address, + locale: row.locale, + created_at: row.created_at, + consumed_at: row.consumed_at, + } + } +} + +struct UserRecoveryTicketRow { + user_recovery_ticket_id: Uuid, + user_recovery_session_id: Uuid, + user_email_id: Uuid, + ticket: String, + created_at: DateTime, + expires_at: DateTime, +} + +impl From for UserRecoveryTicket { + fn from(row: UserRecoveryTicketRow) -> Self { + Self { + id: row.user_recovery_ticket_id.into(), + user_recovery_session_id: row.user_recovery_session_id.into(), + user_email_id: row.user_email_id.into(), + ticket: row.ticket, + created_at: row.created_at, + expires_at: row.expires_at, + } + } +} + +#[async_trait] +impl UserRecoveryRepository for PgUserRecoveryRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_recovery.lookup_session", + skip_all, + fields( + db.query.text, 
+ user_recovery_session.id = %id, + ), + err, + )] + async fn lookup_session( + &mut self, + id: Ulid, + ) -> Result, Self::Error> { + let row = sqlx::query_as!( + UserRecoverySessionRow, + r#" + SELECT + user_recovery_session_id + , email + , user_agent + , ip_address as "ip_address: IpAddr" + , locale + , created_at + , consumed_at + FROM user_recovery_sessions + WHERE user_recovery_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(row) = row else { + return Ok(None); + }; + + Ok(Some(row.into())) + } + + #[tracing::instrument( + name = "db.user_recovery.add_session", + skip_all, + fields( + db.query.text, + user_recovery_session.id, + user_recovery_session.email = email, + user_recovery_session.user_agent = user_agent, + user_recovery_session.ip_address = ip_address.map(|ip| ip.to_string()), + ) + )] + async fn add_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + user_agent: String, + ip_address: Option, + locale: String, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_recovery_session.id", tracing::field::display(id)); + sqlx::query!( + r#" + INSERT INTO user_recovery_sessions ( + user_recovery_session_id + , email + , user_agent + , ip_address + , locale + , created_at + ) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + Uuid::from(id), + &email, + &*user_agent, + ip_address as Option, + &locale, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let user_recovery_session = UserRecoverySession { + id, + email, + user_agent, + ip_address, + locale, + created_at, + consumed_at: None, + }; + + Ok(user_recovery_session) + } + + #[tracing::instrument( + name = "db.user_recovery.find_ticket", + skip_all, + fields( + db.query.text, + user_recovery_ticket.id = ticket, + ), + err, + )] + async fn find_ticket( + &mut self, + ticket: &str, 
+ ) -> Result, Self::Error> { + let row = sqlx::query_as!( + UserRecoveryTicketRow, + r#" + SELECT + user_recovery_ticket_id + , user_recovery_session_id + , user_email_id + , ticket + , created_at + , expires_at + FROM user_recovery_tickets + WHERE ticket = $1 + "#, + ticket, + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(row) = row else { + return Ok(None); + }; + + Ok(Some(row.into())) + } + + #[tracing::instrument( + name = "db.user_recovery.add_ticket", + skip_all, + fields( + db.query.text, + user_recovery_ticket.id, + user_recovery_ticket.id = ticket, + %user_recovery_session.id, + %user_email.id, + ) + )] + async fn add_ticket( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_recovery_session: &UserRecoverySession, + user_email: &UserEmail, + ticket: String, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_recovery_ticket.id", tracing::field::display(id)); + + // TODO: move that to a parameter + let expires_at = created_at + Duration::minutes(10); + + sqlx::query!( + r#" + INSERT INTO user_recovery_tickets ( + user_recovery_ticket_id + , user_recovery_session_id + , user_email_id + , ticket + , created_at + , expires_at + ) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + Uuid::from(id), + Uuid::from(user_recovery_session.id), + Uuid::from(user_email.id), + &ticket, + created_at, + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let ticket = UserRecoveryTicket { + id, + user_recovery_session_id: user_recovery_session.id, + user_email_id: user_email.id, + ticket, + created_at, + expires_at, + }; + + Ok(ticket) + } + + #[tracing::instrument( + name = "db.user_recovery.consume_ticket", + skip_all, + fields( + db.query.text, + %user_recovery_ticket.id, + user_email.id = %user_recovery_ticket.user_email_id, + %user_recovery_session.id, + %user_recovery_session.email, + ), + err, + )] + 
async fn consume_ticket( + &mut self, + clock: &dyn Clock, + user_recovery_ticket: UserRecoveryTicket, + mut user_recovery_session: UserRecoverySession, + ) -> Result { + // We don't really use the ticket, we just want to make sure we drop it + let _ = user_recovery_ticket; + + // This should have been checked by the caller + if user_recovery_session.consumed_at.is_some() { + return Err(DatabaseError::invalid_operation()); + } + + let consumed_at = clock.now(); + + let res = sqlx::query!( + r#" + UPDATE user_recovery_sessions + SET consumed_at = $1 + WHERE user_recovery_session_id = $2 + "#, + consumed_at, + Uuid::from(user_recovery_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + user_recovery_session.consumed_at = Some(consumed_at); + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(user_recovery_session) + } + + #[tracing::instrument( + name = "db.user_recovery.cleanup", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // Use ULID cursor-based pagination. Since ULIDs contain a timestamp, + // we can efficiently delete old sessions without needing an index. + // `MAX(uuid)` isn't a thing in Postgres, so we aggregate on the client side. 
+ let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT user_recovery_session_id + FROM user_recovery_sessions + WHERE ($1::uuid IS NULL OR user_recovery_session_id > $1) + AND user_recovery_session_id <= $2 + ORDER BY user_recovery_session_id + LIMIT $3 + ) + DELETE FROM user_recovery_sessions + USING to_delete + WHERE user_recovery_sessions.user_recovery_session_id = to_delete.user_recovery_session_id + RETURNING user_recovery_sessions.user_recovery_session_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/registration.rs b/matrix-authentication-service/crates/storage-pg/src/user/registration.rs new file mode 100644 index 00000000..fff1dd0a --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/registration.rs @@ -0,0 +1,1067 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Clock, UpstreamOAuthAuthorizationSession, UserEmailAuthentication, UserRegistration, + UserRegistrationPassword, UserRegistrationToken, +}; +use mas_storage::user::UserRegistrationRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{DatabaseError, DatabaseInconsistencyError, ExecuteExt as _}; + +/// An implementation of [`UserRegistrationRepository`] for a PostgreSQL +/// connection +pub struct PgUserRegistrationRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserRegistrationRepository<'c> { + /// Create a new [`PgUserRegistrationRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +struct UserRegistrationLookup { + user_registration_id: Uuid, + ip_address: Option, + user_agent: Option, + post_auth_action: Option, + username: String, + display_name: Option, + terms_url: Option, + email_authentication_id: Option, + user_registration_token_id: Option, + hashed_password: Option, + hashed_password_version: Option, + upstream_oauth_authorization_session_id: Option, + created_at: DateTime, + completed_at: Option>, +} + +impl TryFrom for UserRegistration { + type Error = DatabaseInconsistencyError; + + fn try_from(value: UserRegistrationLookup) -> Result { + let id = Ulid::from(value.user_registration_id); + + let password = match (value.hashed_password, value.hashed_password_version) { + (Some(hashed_password), Some(version)) => { + let version = version.try_into().map_err(|e| { + DatabaseInconsistencyError::on("user_registrations") + .column("hashed_password_version") + .row(id) + .source(e) + })?; + + Some(UserRegistrationPassword { + hashed_password, + version, + }) + } + (None, None) => None, + _ => { + return Err(DatabaseInconsistencyError::on("user_registrations") + .column("hashed_password") + 
.row(id)); + } + }; + + let terms_url = value + .terms_url + .map(|u| u.parse()) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("user_registrations") + .column("terms_url") + .row(id) + .source(e) + })?; + + Ok(UserRegistration { + id, + ip_address: value.ip_address, + user_agent: value.user_agent, + post_auth_action: value.post_auth_action, + username: value.username, + display_name: value.display_name, + terms_url, + email_authentication_id: value.email_authentication_id.map(Ulid::from), + user_registration_token_id: value.user_registration_token_id.map(Ulid::from), + password, + upstream_oauth_authorization_session_id: value + .upstream_oauth_authorization_session_id + .map(Ulid::from), + created_at: value.created_at, + completed_at: value.completed_at, + }) + } +} + +#[async_trait] +impl UserRegistrationRepository for PgUserRegistrationRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_registration.lookup", + skip_all, + fields( + db.query.text, + user_registration.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserRegistrationLookup, + r#" + SELECT user_registration_id + , ip_address as "ip_address: IpAddr" + , user_agent + , post_auth_action + , username + , display_name + , terms_url + , email_authentication_id + , user_registration_token_id + , hashed_password + , hashed_password_version + , upstream_oauth_authorization_session_id + , created_at + , completed_at + FROM user_registrations + WHERE user_registration_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.user_registration.add", + skip_all, + fields( + db.query.text, + user_registration.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: 
String, + ip_address: Option, + user_agent: Option, + post_auth_action: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_registration.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_registrations + ( user_registration_id + , ip_address + , user_agent + , post_auth_action + , username + , created_at + ) + VALUES ($1, $2, $3, $4, $5, $6) + "#, + Uuid::from(id), + ip_address as Option, + user_agent.as_deref(), + post_auth_action, + username, + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserRegistration { + id, + ip_address, + user_agent, + post_auth_action, + created_at, + completed_at: None, + username, + display_name: None, + terms_url: None, + email_authentication_id: None, + user_registration_token_id: None, + password: None, + upstream_oauth_authorization_session_id: None, + }) + } + + #[tracing::instrument( + name = "db.user_registration.set_display_name", + skip_all, + fields( + db.query.text, + user_registration.id = %user_registration.id, + user_registration.display_name = display_name, + ), + err, + )] + async fn set_display_name( + &mut self, + mut user_registration: UserRegistration, + display_name: String, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET display_name = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + display_name, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.display_name = Some(display_name); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.set_terms_url", + skip_all, + fields( + db.query.text, + user_registration.id = %user_registration.id, + user_registration.terms_url = %terms_url, + ), + err, + )] + async fn set_terms_url( + &mut self, + mut 
user_registration: UserRegistration, + terms_url: Url, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET terms_url = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + terms_url.as_str(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.terms_url = Some(terms_url); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.set_email_authentication", + skip_all, + fields( + db.query.text, + %user_registration.id, + %user_email_authentication.id, + %user_email_authentication.email, + ), + err, + )] + async fn set_email_authentication( + &mut self, + mut user_registration: UserRegistration, + user_email_authentication: &UserEmailAuthentication, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET email_authentication_id = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + Uuid::from(user_email_authentication.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.email_authentication_id = Some(user_email_authentication.id); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.set_password", + skip_all, + fields( + db.query.text, + user_registration.id = %user_registration.id, + user_registration.hashed_password = hashed_password, + user_registration.hashed_password_version = version, + ), + err, + )] + async fn set_password( + &mut self, + mut user_registration: UserRegistration, + hashed_password: String, + version: u16, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET hashed_password = $2, hashed_password_version = $3 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + hashed_password, + 
i32::from(version), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.password = Some(UserRegistrationPassword { + hashed_password, + version, + }); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.set_registration_token", + skip_all, + fields( + db.query.text, + %user_registration.id, + %user_registration_token.id, + ), + err, + )] + async fn set_registration_token( + &mut self, + mut user_registration: UserRegistration, + user_registration_token: &UserRegistrationToken, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET user_registration_token_id = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + Uuid::from(user_registration_token.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.user_registration_token_id = Some(user_registration_token.id); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.set_upstream_oauth_authorization_session", + skip_all, + fields( + db.query.text, + %user_registration.id, + %upstream_oauth_authorization_session.id, + ), + err, + )] + async fn set_upstream_oauth_authorization_session( + &mut self, + mut user_registration: UserRegistration, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result { + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET upstream_oauth_authorization_session_id = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + Uuid::from(upstream_oauth_authorization_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.upstream_oauth_authorization_session_id = + Some(upstream_oauth_authorization_session.id); + + 
Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.complete", + skip_all, + fields( + db.query.text, + user_registration.id = %user_registration.id, + ), + err, + )] + async fn complete( + &mut self, + clock: &dyn Clock, + mut user_registration: UserRegistration, + ) -> Result { + let completed_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE user_registrations + SET completed_at = $2 + WHERE user_registration_id = $1 AND completed_at IS NULL + "#, + Uuid::from(user_registration.id), + completed_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + user_registration.completed_at = Some(completed_at); + + Ok(user_registration) + } + + #[tracing::instrument( + name = "db.user_registration.cleanup", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error> { + // `MAX(uuid)` isn't a thing in Postgres, so we can't just re-select the + // deleted rows and do a MAX on the `user_registration_id`. + // Instead, we do the aggregation on the client side, which is a little + // less efficient, but good enough. 
+ let res = sqlx::query_scalar!( + r#" + WITH to_delete AS ( + SELECT user_registration_id + FROM user_registrations + WHERE ($1::uuid IS NULL OR user_registration_id > $1) + AND user_registration_id <= $2 + ORDER BY user_registration_id + LIMIT $3 + ) + DELETE FROM user_registrations + USING to_delete + WHERE user_registrations.user_registration_id = to_delete.user_registration_id + RETURNING user_registrations.user_registration_id + "#, + since.map(Uuid::from), + Uuid::from(until), + i64::try_from(limit).unwrap_or(i64::MAX) + ) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let count = res.len(); + let max_id = res.into_iter().max(); + + Ok((count, max_id.map(Ulid::from))) + } +} + +#[cfg(test)] +mod tests { + use std::net::{IpAddr, Ipv4Addr}; + + use mas_data_model::{ + Clock, UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderTokenAuthMethod, UserRegistrationPassword, clock::MockClock, + }; + use mas_iana::jose::JsonWebSignatureAlg; + use mas_storage::upstream_oauth2::UpstreamOAuthProviderParams; + use oauth2_types::scope::Scope; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_create_lookup_complete(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + assert_eq!(registration.created_at, clock.now()); + assert_eq!(registration.completed_at, None); + assert_eq!(registration.username, "alice"); + assert_eq!(registration.display_name, None); + assert_eq!(registration.terms_url, None); + assert_eq!(registration.email_authentication_id, None); + 
assert_eq!(registration.password, None); + assert_eq!(registration.user_agent, None); + assert_eq!(registration.ip_address, None); + assert_eq!(registration.post_auth_action, None); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.id, registration.id); + assert_eq!(lookup.created_at, registration.created_at); + assert_eq!(lookup.completed_at, registration.completed_at); + assert_eq!(lookup.username, registration.username); + assert_eq!(lookup.display_name, registration.display_name); + assert_eq!(lookup.terms_url, registration.terms_url); + assert_eq!( + lookup.email_authentication_id, + registration.email_authentication_id + ); + assert_eq!(lookup.password, registration.password); + assert_eq!(lookup.user_agent, registration.user_agent); + assert_eq!(lookup.ip_address, registration.ip_address); + assert_eq!(lookup.post_auth_action, registration.post_auth_action); + + // Mark the registration as completed + let registration = repo + .user_registration() + .complete(&clock, registration) + .await + .unwrap(); + assert_eq!(registration.completed_at, Some(clock.now())); + + // Lookup the registration again + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + assert_eq!(lookup.completed_at, registration.completed_at); + + // Do it again, it should fail + let res = repo + .user_registration() + .complete(&clock, registration) + .await; + assert!(res.is_err()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_create_useragent_ipaddress(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add( + &mut rng, + &clock, + "alice".to_owned(), + Some(IpAddr::V4(Ipv4Addr::LOCALHOST)), + Some("Mozilla/5.0".to_owned()), + Some(serde_json::json!({"action": 
"continue_compat_sso_login", "id": "01FSHN9AG0MKGTBNZ16RDR3PVY"})), + ) + .await + .unwrap(); + + assert_eq!(registration.user_agent, Some("Mozilla/5.0".to_owned())); + assert_eq!( + registration.ip_address, + Some(IpAddr::V4(Ipv4Addr::LOCALHOST)) + ); + assert_eq!( + registration.post_auth_action, + Some( + serde_json::json!({"action": "continue_compat_sso_login", "id": "01FSHN9AG0MKGTBNZ16RDR3PVY"}) + ) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.user_agent, registration.user_agent); + assert_eq!(lookup.ip_address, registration.ip_address); + assert_eq!(lookup.post_auth_action, registration.post_auth_action); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_display_name(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + assert_eq!(registration.display_name, None); + + let registration = repo + .user_registration() + .set_display_name(registration, "Alice".to_owned()) + .await + .unwrap(); + + assert_eq!(registration.display_name, Some("Alice".to_owned())); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.display_name, registration.display_name); + + // Setting it again should work + let registration = repo + .user_registration() + .set_display_name(registration, "Bob".to_owned()) + .await + .unwrap(); + + assert_eq!(registration.display_name, Some("Bob".to_owned())); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.display_name, registration.display_name); + + // Can't set it once completed + let registration = repo + .user_registration() + 
.complete(&clock, registration) + .await + .unwrap(); + + let res = repo + .user_registration() + .set_display_name(registration, "Charlie".to_owned()) + .await; + assert!(res.is_err()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_terms_url(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + assert_eq!(registration.terms_url, None); + + let registration = repo + .user_registration() + .set_terms_url(registration, "https://example.com/terms".parse().unwrap()) + .await + .unwrap(); + + assert_eq!( + registration.terms_url, + Some("https://example.com/terms".parse().unwrap()) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.terms_url, registration.terms_url); + + // Setting it again should work + let registration = repo + .user_registration() + .set_terms_url(registration, "https://example.com/terms2".parse().unwrap()) + .await + .unwrap(); + + assert_eq!( + registration.terms_url, + Some("https://example.com/terms2".parse().unwrap()) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.terms_url, registration.terms_url); + + // Can't set it once completed + let registration = repo + .user_registration() + .complete(&clock, registration) + .await + .unwrap(); + + let res = repo + .user_registration() + .set_terms_url(registration, "https://example.com/terms3".parse().unwrap()) + .await; + assert!(res.is_err()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_email_authentication(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = 
PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + assert_eq!(registration.email_authentication_id, None); + + let authentication = repo + .user_email() + .add_authentication_for_registration( + &mut rng, + &clock, + "alice@example.com".to_owned(), + ®istration, + ) + .await + .unwrap(); + + let registration = repo + .user_registration() + .set_email_authentication(registration, &authentication) + .await + .unwrap(); + + assert_eq!( + registration.email_authentication_id, + Some(authentication.id) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!( + lookup.email_authentication_id, + registration.email_authentication_id + ); + + // Setting it again should work + let registration = repo + .user_registration() + .set_email_authentication(registration, &authentication) + .await + .unwrap(); + + assert_eq!( + registration.email_authentication_id, + Some(authentication.id) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!( + lookup.email_authentication_id, + registration.email_authentication_id + ); + + // Can't set it once completed + let registration = repo + .user_registration() + .complete(&clock, registration) + .await + .unwrap(); + + let res = repo + .user_registration() + .set_email_authentication(registration, &authentication) + .await; + assert!(res.is_err()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_password(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + 
assert_eq!(registration.password, None); + + let registration = repo + .user_registration() + .set_password(registration, "fakehashedpassword".to_owned(), 1) + .await + .unwrap(); + + assert_eq!( + registration.password, + Some(UserRegistrationPassword { + hashed_password: "fakehashedpassword".to_owned(), + version: 1, + }) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.password, registration.password); + + // Setting it again should work + let registration = repo + .user_registration() + .set_password(registration, "fakehashedpassword2".to_owned(), 2) + .await + .unwrap(); + + assert_eq!( + registration.password, + Some(UserRegistrationPassword { + hashed_password: "fakehashedpassword2".to_owned(), + version: 2, + }) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.password, registration.password); + + // Can't set it once completed + let registration = repo + .user_registration() + .complete(&clock, registration) + .await + .unwrap(); + + let res = repo + .user_registration() + .set_password(registration, "fakehashedpassword3".to_owned(), 3) + .await; + assert!(res.is_err()); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_upstream_oauth_session(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + let registration = repo + .user_registration() + .add(&mut rng, &clock, "alice".to_owned(), None, None, None) + .await + .unwrap(); + + assert_eq!(registration.upstream_oauth_authorization_session_id, None); + + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: 
Scope::from_iter([oauth2_types::scope::OPENID]), + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + jwks_uri_override: None, + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + let session = repo + .upstream_oauth_session() + .add(&mut rng, &clock, &provider, "state".to_owned(), None, None) + .await + .unwrap(); + + let registration = repo + .user_registration() + .set_upstream_oauth_authorization_session(registration, &session) + .await + .unwrap(); + + assert_eq!( + registration.upstream_oauth_authorization_session_id, + Some(session.id) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!( + lookup.upstream_oauth_authorization_session_id, + registration.upstream_oauth_authorization_session_id + ); + + // Setting it again should work + let registration = repo + .user_registration() + .set_upstream_oauth_authorization_session(registration, &session) + .await + .unwrap(); + + assert_eq!( + registration.upstream_oauth_authorization_session_id, + Some(session.id) + ); + + let lookup = repo + .user_registration() + .lookup(registration.id) + .await + .unwrap() + .unwrap(); + + assert_eq!( + lookup.upstream_oauth_authorization_session_id, + registration.upstream_oauth_authorization_session_id + 
); + + // Can't set it once completed + let registration = repo + .user_registration() + .complete(&clock, registration) + .await + .unwrap(); + + let res = repo + .user_registration() + .set_upstream_oauth_authorization_session(registration, &session) + .await; + assert!(res.is_err()); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/registration_token.rs b/matrix-authentication-service/crates/storage-pg/src/user/registration_token.rs new file mode 100644 index 00000000..5c9231aa --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/registration_token.rs @@ -0,0 +1,960 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{Clock, UserRegistrationToken}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + user::{UserRegistrationTokenFilter, UserRegistrationTokenRepository}, +}; +use rand::RngCore; +use sea_query::{Condition, Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseInconsistencyError, + errors::DatabaseError, + filter::{Filter, StatementExt}, + iden::UserRegistrationTokens, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`mas_storage::user::UserRegistrationTokenRepository`] +/// for a PostgreSQL connection +pub struct PgUserRegistrationTokenRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserRegistrationTokenRepository<'c> { + /// Create a new [`PgUserRegistrationTokenRepository`] from an active + /// PostgreSQL connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +#[enum_def] +struct UserRegistrationTokenLookup { + 
user_registration_token_id: Uuid, + token: String, + usage_limit: Option, + times_used: i32, + created_at: DateTime, + last_used_at: Option>, + expires_at: Option>, + revoked_at: Option>, +} + +impl Node for UserRegistrationTokenLookup { + fn cursor(&self) -> Ulid { + self.user_registration_token_id.into() + } +} + +impl Filter for UserRegistrationTokenFilter { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.has_been_used().map(|has_been_used| { + if has_been_used { + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::TimesUsed, + )) + .gt(0) + } else { + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::TimesUsed, + )) + .eq(0) + } + })) + .add_option(self.is_revoked().map(|is_revoked| { + if is_revoked { + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::RevokedAt, + )) + .is_not_null() + } else { + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::RevokedAt, + )) + .is_null() + } + })) + .add_option(self.is_expired().map(|is_expired| { + if is_expired { + Condition::all() + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .is_not_null(), + ) + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .lt(Expr::val(self.now())), + ) + } else { + Condition::any() + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .is_null(), + ) + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .gte(Expr::val(self.now())), + ) + } + })) + .add_option(self.is_valid().map(|is_valid| { + let valid = Condition::all() + // Has not reached its usage limit + .add( + Condition::any() + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::UsageLimit, + )) + .is_null(), + ) + .add( + Expr::col(( + 
UserRegistrationTokens::Table, + UserRegistrationTokens::TimesUsed, + )) + .lt(Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::UsageLimit, + ))), + ), + ) + // Has not been revoked + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::RevokedAt, + )) + .is_null(), + ) + // Has not expired + .add( + Condition::any() + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .is_null(), + ) + .add( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )) + .gte(Expr::val(self.now())), + ), + ); + + if is_valid { valid } else { valid.not() } + })) + } +} + +impl TryFrom for UserRegistrationToken { + type Error = DatabaseInconsistencyError; + + fn try_from(res: UserRegistrationTokenLookup) -> Result { + let id = Ulid::from(res.user_registration_token_id); + + let usage_limit = res + .usage_limit + .map(u32::try_from) + .transpose() + .map_err(|e| { + DatabaseInconsistencyError::on("user_registration_tokens") + .column("usage_limit") + .row(id) + .source(e) + })?; + + let times_used = res.times_used.try_into().map_err(|e| { + DatabaseInconsistencyError::on("user_registration_tokens") + .column("times_used") + .row(id) + .source(e) + })?; + + Ok(UserRegistrationToken { + id, + token: res.token, + usage_limit, + times_used, + created_at: res.created_at, + last_used_at: res.last_used_at, + expires_at: res.expires_at, + revoked_at: res.revoked_at, + }) + } +} + +#[async_trait] +impl UserRegistrationTokenRepository for PgUserRegistrationTokenRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_registration_token.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: UserRegistrationTokenFilter, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + 
UserRegistrationTokens::UserRegistrationTokenId, + )), + UserRegistrationTokenLookupIden::UserRegistrationTokenId, + ) + .expr_as( + Expr::col((UserRegistrationTokens::Table, UserRegistrationTokens::Token)), + UserRegistrationTokenLookupIden::Token, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::UsageLimit, + )), + UserRegistrationTokenLookupIden::UsageLimit, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::TimesUsed, + )), + UserRegistrationTokenLookupIden::TimesUsed, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::CreatedAt, + )), + UserRegistrationTokenLookupIden::CreatedAt, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::LastUsedAt, + )), + UserRegistrationTokenLookupIden::LastUsedAt, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::ExpiresAt, + )), + UserRegistrationTokenLookupIden::ExpiresAt, + ) + .expr_as( + Expr::col(( + UserRegistrationTokens::Table, + UserRegistrationTokens::RevokedAt, + )), + UserRegistrationTokenLookupIden::RevokedAt, + ) + .from(UserRegistrationTokens::Table) + .apply_filter(filter) + .generate_pagination( + ( + UserRegistrationTokens::Table, + UserRegistrationTokens::UserRegistrationTokenId, + ), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination + .process(edges) + .try_map(UserRegistrationToken::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.user_registration_token.count", + skip_all, + fields( + db.query.text, + user_registration_token.filter = ?filter, + ), + err, + )] + async fn count(&mut self, filter: UserRegistrationTokenFilter) -> Result { + let (sql, values) = Query::select() + .expr( + Expr::col(( + UserRegistrationTokens::Table, + 
UserRegistrationTokens::UserRegistrationTokenId, + )) + .count(), + ) + .from(UserRegistrationTokens::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, values) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.user_registration_token.lookup", + skip_all, + fields( + db.query.text, + user_registration_token.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserRegistrationTokenLookup, + r#" + SELECT user_registration_token_id, + token, + usage_limit, + times_used, + created_at, + last_used_at, + expires_at, + revoked_at + FROM user_registration_tokens + WHERE user_registration_token_id = $1 + "#, + Uuid::from(id) + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { + return Ok(None); + }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.user_registration_token.find_by_token", + skip_all, + fields( + db.query.text, + token = %token, + ), + err, + )] + async fn find_by_token( + &mut self, + token: &str, + ) -> Result, Self::Error> { + let res = sqlx::query_as!( + UserRegistrationTokenLookup, + r#" + SELECT user_registration_token_id, + token, + usage_limit, + times_used, + created_at, + last_used_at, + expires_at, + revoked_at + FROM user_registration_tokens + WHERE token = $1 + "#, + token + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { + return Ok(None); + }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.user_registration_token.add", + skip_all, + fields( + db.query.text, + user_registration_token.token = %token, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn mas_data_model::Clock, + token: String, + usage_limit: Option, + 
expires_at: Option>, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + + let usage_limit_i32 = usage_limit + .map(i32::try_from) + .transpose() + .map_err(DatabaseError::to_invalid_operation)?; + + sqlx::query!( + r#" + INSERT INTO user_registration_tokens + (user_registration_token_id, token, usage_limit, created_at, expires_at) + VALUES ($1, $2, $3, $4, $5) + "#, + Uuid::from(id), + &token, + usage_limit_i32, + created_at, + expires_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(UserRegistrationToken { + id, + token, + usage_limit, + times_used: 0, + created_at, + last_used_at: None, + expires_at, + revoked_at: None, + }) + } + + #[tracing::instrument( + name = "db.user_registration_token.use_token", + skip_all, + fields( + db.query.text, + user_registration_token.id = %token.id, + ), + err, + )] + async fn use_token( + &mut self, + clock: &dyn Clock, + token: UserRegistrationToken, + ) -> Result { + let now = clock.now(); + let new_times_used = sqlx::query_scalar!( + r#" + UPDATE user_registration_tokens + SET times_used = times_used + 1, + last_used_at = $2 + WHERE user_registration_token_id = $1 AND revoked_at IS NULL + RETURNING times_used + "#, + Uuid::from(token.id), + now, + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + let new_times_used = new_times_used + .try_into() + .map_err(DatabaseError::to_invalid_operation)?; + + Ok(UserRegistrationToken { + times_used: new_times_used, + last_used_at: Some(now), + ..token + }) + } + + #[tracing::instrument( + name = "db.user_registration_token.revoke", + skip_all, + fields( + db.query.text, + user_registration_token.id = %token.id, + ), + err, + )] + async fn revoke( + &mut self, + clock: &dyn Clock, + mut token: UserRegistrationToken, + ) -> Result { + let revoked_at = clock.now(); + let res = sqlx::query!( + r#" + UPDATE user_registration_tokens + SET revoked_at = $2 + WHERE user_registration_token_id = $1 + "#, + 
Uuid::from(token.id),
+            revoked_at,
+        )
+        .traced()
+        .execute(&mut *self.conn)
+        .await?;
+
+        // Exactly one row must have been updated; anything else means the
+        // token id did not exist (or existed twice, which the PK forbids).
+        DatabaseError::ensure_affected_rows(&res, 1)?;
+
+        token.revoked_at = Some(revoked_at);
+
+        Ok(token)
+    }
+
+    /// Clear the `revoked_at` timestamp, making a previously revoked token
+    /// usable again. Returns the updated token.
+    #[tracing::instrument(
+        name = "db.user_registration_token.unrevoke",
+        skip_all,
+        fields(
+            db.query.text,
+            user_registration_token.id = %token.id,
+        ),
+        err,
+    )]
+    async fn unrevoke(
+        &mut self,
+        mut token: UserRegistrationToken,
+    ) -> Result<UserRegistrationToken, Self::Error> {
+        let res = sqlx::query!(
+            r#"
+                UPDATE user_registration_tokens
+                SET revoked_at = NULL
+                WHERE user_registration_token_id = $1
+            "#,
+            Uuid::from(token.id),
+        )
+        .traced()
+        .execute(&mut *self.conn)
+        .await?;
+
+        DatabaseError::ensure_affected_rows(&res, 1)?;
+
+        token.revoked_at = None;
+
+        Ok(token)
+    }
+
+    /// Set or clear (`None`) the expiry time of a registration token.
+    #[tracing::instrument(
+        name = "db.user_registration_token.set_expiry",
+        skip_all,
+        fields(
+            db.query.text,
+            user_registration_token.id = %token.id,
+        ),
+        err,
+    )]
+    async fn set_expiry(
+        &mut self,
+        mut token: UserRegistrationToken,
+        expires_at: Option<DateTime<Utc>>,
+    ) -> Result<UserRegistrationToken, Self::Error> {
+        let res = sqlx::query!(
+            r#"
+                UPDATE user_registration_tokens
+                SET expires_at = $2
+                WHERE user_registration_token_id = $1
+            "#,
+            Uuid::from(token.id),
+            expires_at,
+        )
+        .traced()
+        .execute(&mut *self.conn)
+        .await?;
+
+        DatabaseError::ensure_affected_rows(&res, 1)?;
+
+        token.expires_at = expires_at;
+
+        Ok(token)
+    }
+
+    /// Set or clear (`None`) the usage limit of a registration token.
+    ///
+    /// The limit is stored as an `i32` column, so values above `i32::MAX`
+    /// are rejected as an invalid operation rather than silently truncated.
+    #[tracing::instrument(
+        name = "db.user_registration_token.set_usage_limit",
+        skip_all,
+        fields(
+            db.query.text,
+            user_registration_token.id = %token.id,
+        ),
+        err,
+    )]
+    async fn set_usage_limit(
+        &mut self,
+        mut token: UserRegistrationToken,
+        usage_limit: Option<u32>,
+    ) -> Result<UserRegistrationToken, Self::Error> {
+        // u32 -> i32 can overflow; surface that as an invalid-operation error.
+        let usage_limit_i32 = usage_limit
+            .map(i32::try_from)
+            .transpose()
+            .map_err(DatabaseError::to_invalid_operation)?;
+
+        let res = sqlx::query!(
+            r#"
+                UPDATE user_registration_tokens
+                SET usage_limit = $2
+                WHERE user_registration_token_id = $1
+            "#,
+            Uuid::from(token.id),
+            usage_limit_i32,
+        )
.traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, 1)?; + + token.usage_limit = usage_limit; + + Ok(token) + } +} + +#[cfg(test)] +mod tests { + use chrono::Duration; + use mas_data_model::{Clock as _, clock::MockClock}; + use mas_storage::{Pagination, user::UserRegistrationTokenFilter}; + use rand::SeedableRng; + use rand_chacha::ChaChaRng; + use sqlx::PgPool; + + use crate::PgRepository; + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_unrevoke(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a token + let token = repo + .user_registration_token() + .add(&mut rng, &clock, "test_token".to_owned(), None, None) + .await + .unwrap(); + + // Revoke the token + let revoked_token = repo + .user_registration_token() + .revoke(&clock, token) + .await + .unwrap(); + + // Verify it's revoked + assert!(revoked_token.revoked_at.is_some()); + + // Unrevoke the token + let unrevoked_token = repo + .user_registration_token() + .unrevoke(revoked_token) + .await + .unwrap(); + + // Verify it's no longer revoked + assert!(unrevoked_token.revoked_at.is_none()); + + // Check that we can find it with the non-revoked filter + let non_revoked_filter = UserRegistrationTokenFilter::new(clock.now()).with_revoked(false); + let page = repo + .user_registration_token() + .list(non_revoked_filter, Pagination::first(10)) + .await + .unwrap(); + + assert!(page.edges.iter().any(|t| t.node.id == unrevoked_token.id)); + } + + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_set_expiry(pool: PgPool) { + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create a token without expiry + let token = repo + .user_registration_token() + .add(&mut rng, &clock, "test_token_expiry".to_owned(), 
None, None)
+            .await
+            .unwrap();
+
+        // Verify it has no expiration
+        assert!(token.expires_at.is_none());
+
+        // Set an expiration
+        let future_time = clock.now() + Duration::days(30);
+        let updated_token = repo
+            .user_registration_token()
+            .set_expiry(token, Some(future_time))
+            .await
+            .unwrap();
+
+        // Verify expiration is set
+        assert_eq!(updated_token.expires_at, Some(future_time));
+
+        // Remove the expiration
+        let final_token = repo
+            .user_registration_token()
+            .set_expiry(updated_token, None)
+            .await
+            .unwrap();
+
+        // Verify expiration is removed
+        assert!(final_token.expires_at.is_none());
+    }
+
+    /// Exercises setting, changing and clearing the usage limit of a token,
+    /// checking the returned model after each mutation.
+    #[sqlx::test(migrator = "crate::MIGRATOR")]
+    async fn test_set_usage_limit(pool: PgPool) {
+        let mut rng = ChaChaRng::seed_from_u64(42);
+        let clock = MockClock::default();
+
+        let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed();
+
+        // Create a token without usage limit
+        let token = repo
+            .user_registration_token()
+            .add(&mut rng, &clock, "test_token_limit".to_owned(), None, None)
+            .await
+            .unwrap();
+
+        // Verify it has no usage limit
+        assert!(token.usage_limit.is_none());
+
+        // Set a usage limit
+        let updated_token = repo
+            .user_registration_token()
+            .set_usage_limit(token, Some(5))
+            .await
+            .unwrap();
+
+        // Verify usage limit is set
+        assert_eq!(updated_token.usage_limit, Some(5));
+
+        // Change the usage limit
+        let changed_token = repo
+            .user_registration_token()
+            .set_usage_limit(updated_token, Some(10))
+            .await
+            .unwrap();
+
+        // Verify usage limit is changed
+        assert_eq!(changed_token.usage_limit, Some(10));
+
+        // Remove the usage limit
+        let final_token = repo
+            .user_registration_token()
+            .set_usage_limit(changed_token, None)
+            .await
+            .unwrap();
+
+        // Verify usage limit is removed
+        assert!(final_token.usage_limit.is_none());
+    }
+
+    /// Covers the `list`/`count` filters: used/unused, expired, revoked,
+    /// valid, combined filters and pagination.
+    #[sqlx::test(migrator = "crate::MIGRATOR")]
+    async fn test_list_and_count(pool: PgPool) {
+        let mut rng = ChaChaRng::seed_from_u64(42);
+        let clock = MockClock::default();
+ + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + + // Create different types of tokens + // 1. A regular token + let _token1 = repo + .user_registration_token() + .add(&mut rng, &clock, "token1".to_owned(), None, None) + .await + .unwrap(); + + // 2. A token that has been used + let token2 = repo + .user_registration_token() + .add(&mut rng, &clock, "token2".to_owned(), None, None) + .await + .unwrap(); + let token2 = repo + .user_registration_token() + .use_token(&clock, token2) + .await + .unwrap(); + + // 3. A token that is expired + let past_time = clock.now() - Duration::days(1); + let token3 = repo + .user_registration_token() + .add(&mut rng, &clock, "token3".to_owned(), None, Some(past_time)) + .await + .unwrap(); + + // 4. A token that is revoked + let token4 = repo + .user_registration_token() + .add(&mut rng, &clock, "token4".to_owned(), None, None) + .await + .unwrap(); + let token4 = repo + .user_registration_token() + .revoke(&clock, token4) + .await + .unwrap(); + + // Test list with empty filter + let empty_filter = UserRegistrationTokenFilter::new(clock.now()); + let page = repo + .user_registration_token() + .list(empty_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 4); + + // Test count with empty filter + let count = repo + .user_registration_token() + .count(empty_filter) + .await + .unwrap(); + assert_eq!(count, 4); + + // Test has_been_used filter + let used_filter = UserRegistrationTokenFilter::new(clock.now()).with_been_used(true); + let page = repo + .user_registration_token() + .list(used_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + assert_eq!(page.edges[0].node.id, token2.id); + + // Test unused filter + let unused_filter = UserRegistrationTokenFilter::new(clock.now()).with_been_used(false); + let page = repo + .user_registration_token() + .list(unused_filter, Pagination::first(10)) + .await + .unwrap(); + 
assert_eq!(page.edges.len(), 3); + + // Test is_expired filter + let expired_filter = UserRegistrationTokenFilter::new(clock.now()).with_expired(true); + let page = repo + .user_registration_token() + .list(expired_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + assert_eq!(page.edges[0].node.id, token3.id); + + let not_expired_filter = UserRegistrationTokenFilter::new(clock.now()).with_expired(false); + let page = repo + .user_registration_token() + .list(not_expired_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 3); + + // Test is_revoked filter + let revoked_filter = UserRegistrationTokenFilter::new(clock.now()).with_revoked(true); + let page = repo + .user_registration_token() + .list(revoked_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + assert_eq!(page.edges[0].node.id, token4.id); + + let not_revoked_filter = UserRegistrationTokenFilter::new(clock.now()).with_revoked(false); + let page = repo + .user_registration_token() + .list(not_revoked_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 3); + + // Test is_valid filter + let valid_filter = UserRegistrationTokenFilter::new(clock.now()).with_valid(true); + let page = repo + .user_registration_token() + .list(valid_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 2); + + let invalid_filter = UserRegistrationTokenFilter::new(clock.now()).with_valid(false); + let page = repo + .user_registration_token() + .list(invalid_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 2); + + // Test combined filters + let combined_filter = UserRegistrationTokenFilter::new(clock.now()) + .with_been_used(false) + .with_revoked(true); + let page = repo + .user_registration_token() + .list(combined_filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + 
assert_eq!(page.edges[0].node.id, token4.id); + + // Test pagination + let page = repo + .user_registration_token() + .list(empty_filter, Pagination::first(2)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 2); + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/session.rs b/matrix-authentication-service/crates/storage-pg/src/user/session.rs new file mode 100644 index 00000000..dba42726 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/session.rs @@ -0,0 +1,751 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Authentication, AuthenticationMethod, BrowserSession, Clock, Password, + UpstreamOAuthAuthorizationSession, User, +}; +use mas_storage::{ + Page, Pagination, + pagination::Node, + user::{BrowserSessionFilter, BrowserSessionRepository}, +}; +use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query}; +use sea_query_binder::SqlxBinder; +use sqlx::PgConnection; +use ulid::Ulid; +use uuid::Uuid; + +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::StatementExt, + iden::{UpstreamOAuthAuthorizationSessions, UserSessions, Users}, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +/// An implementation of [`BrowserSessionRepository`] for a PostgreSQL +/// connection +pub struct PgBrowserSessionRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgBrowserSessionRepository<'c> { + /// Create a new [`PgBrowserSessionRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[allow(clippy::struct_field_names)] +#[derive(sqlx::FromRow)] 
+#[sea_query::enum_def] +struct SessionLookup { + user_session_id: Uuid, + user_session_created_at: DateTime, + user_session_finished_at: Option>, + user_session_user_agent: Option, + user_session_last_active_at: Option>, + user_session_last_active_ip: Option, + user_id: Uuid, + user_username: String, + user_created_at: DateTime, + user_locked_at: Option>, + user_deactivated_at: Option>, + user_can_request_admin: bool, + user_is_guest: bool, +} + +impl Node for SessionLookup { + fn cursor(&self) -> Ulid { + self.user_id.into() + } +} + +impl TryFrom for BrowserSession { + type Error = DatabaseInconsistencyError; + + fn try_from(value: SessionLookup) -> Result { + let id = Ulid::from(value.user_id); + let user = User { + id, + username: value.user_username, + sub: id.to_string(), + created_at: value.user_created_at, + locked_at: value.user_locked_at, + deactivated_at: value.user_deactivated_at, + can_request_admin: value.user_can_request_admin, + is_guest: value.user_is_guest, + }; + + Ok(BrowserSession { + id: value.user_session_id.into(), + user, + created_at: value.user_session_created_at, + finished_at: value.user_session_finished_at, + user_agent: value.user_session_user_agent, + last_active_at: value.user_session_last_active_at, + last_active_ip: value.user_session_last_active_ip, + }) + } +} + +struct AuthenticationLookup { + user_session_authentication_id: Uuid, + created_at: DateTime, + user_password_id: Option, + upstream_oauth_authorization_session_id: Option, +} + +impl TryFrom for Authentication { + type Error = DatabaseInconsistencyError; + + fn try_from(value: AuthenticationLookup) -> Result { + let id = Ulid::from(value.user_session_authentication_id); + let authentication_method = match ( + value.user_password_id.map(Into::into), + value + .upstream_oauth_authorization_session_id + .map(Into::into), + ) { + (Some(user_password_id), None) => AuthenticationMethod::Password { user_password_id }, + (None, Some(upstream_oauth2_session_id)) => 
AuthenticationMethod::UpstreamOAuth2 { + upstream_oauth2_session_id, + }, + (None, None) => AuthenticationMethod::Unknown, + _ => { + return Err(DatabaseInconsistencyError::on("user_session_authentications").row(id)); + } + }; + + Ok(Authentication { + id, + created_at: value.created_at, + authentication_method, + }) + } +} + +impl crate::filter::Filter for BrowserSessionFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all() + .add_option(self.user().map(|user| { + Expr::col((UserSessions::Table, UserSessions::UserId)).eq(Uuid::from(user.id)) + })) + .add_option(self.state().map(|state| { + if state.is_active() { + Expr::col((UserSessions::Table, UserSessions::FinishedAt)).is_null() + } else { + Expr::col((UserSessions::Table, UserSessions::FinishedAt)).is_not_null() + } + })) + .add_option(self.last_active_after().map(|last_active_after| { + Expr::col((UserSessions::Table, UserSessions::LastActiveAt)).gt(last_active_after) + })) + .add_option(self.last_active_before().map(|last_active_before| { + Expr::col((UserSessions::Table, UserSessions::LastActiveAt)).lt(last_active_before) + })) + .add_option(self.linked_to_upstream_sessions().map(|filter| { + Expr::col((UserSessions::Table, UserSessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UserSessionId, + ))) + .from(UpstreamOAuthAuthorizationSessions::Table) + .apply_filter(filter) + .take(), + ) + })) + } +} + +#[async_trait] +impl BrowserSessionRepository for PgBrowserSessionRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.browser_session.lookup", + skip_all, + fields( + db.query.text, + user_session.id = %id, + ), + err, + )] + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error> { + let res = sqlx::query_as!( + SessionLookup, + r#" + SELECT s.user_session_id + , s.created_at AS 
"user_session_created_at" + , s.finished_at AS "user_session_finished_at" + , s.user_agent AS "user_session_user_agent" + , s.last_active_at AS "user_session_last_active_at" + , s.last_active_ip AS "user_session_last_active_ip: IpAddr" + , u.user_id + , u.username AS "user_username" + , u.created_at AS "user_created_at" + , u.locked_at AS "user_locked_at" + , u.deactivated_at AS "user_deactivated_at" + , u.can_request_admin AS "user_can_request_admin" + , u.is_guest AS "user_is_guest" + FROM user_sessions s + INNER JOIN users u + USING (user_id) + WHERE s.user_session_id = $1 + "#, + Uuid::from(id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(res) = res else { return Ok(None) }; + + Ok(Some(res.try_into()?)) + } + + #[tracing::instrument( + name = "db.browser_session.add", + skip_all, + fields( + db.query.text, + %user.id, + user_session.id, + ), + err, + )] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + user_agent: Option, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_session.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_sessions (user_session_id, user_id, created_at, user_agent) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(user.id), + created_at, + user_agent.as_deref(), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + let session = BrowserSession { + id, + // XXX + user: user.clone(), + created_at, + finished_at: None, + user_agent, + last_active_at: None, + last_active_ip: None, + }; + + Ok(session) + } + + #[tracing::instrument( + name = "db.browser_session.finish", + skip_all, + fields( + db.query.text, + %user_session.id, + ), + err, + )] + async fn finish( + &mut self, + clock: &dyn Clock, + mut user_session: BrowserSession, + ) -> Result { + let finished_at = clock.now(); + let res = sqlx::query!( + r#" + 
UPDATE user_sessions + SET finished_at = $1 + WHERE user_session_id = $2 + "#, + finished_at, + Uuid::from(user_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + user_session.finished_at = Some(finished_at); + + DatabaseError::ensure_affected_rows(&res, 1)?; + + Ok(user_session) + } + + #[tracing::instrument( + name = "db.browser_session.finish_bulk", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: BrowserSessionFilter<'_>, + ) -> Result { + let finished_at = clock.now(); + let (sql, arguments) = sea_query::Query::update() + .table(UserSessions::Table) + .value(UserSessions::FinishedAt, finished_at) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let res = sqlx::query_with(&sql, arguments) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(res.rows_affected().try_into().unwrap_or(usize::MAX)) + } + + #[tracing::instrument( + name = "db.browser_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: BrowserSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = sea_query::Query::select() + .expr_as( + Expr::col((UserSessions::Table, UserSessions::UserSessionId)), + SessionLookupIden::UserSessionId, + ) + .expr_as( + Expr::col((UserSessions::Table, UserSessions::CreatedAt)), + SessionLookupIden::UserSessionCreatedAt, + ) + .expr_as( + Expr::col((UserSessions::Table, UserSessions::FinishedAt)), + SessionLookupIden::UserSessionFinishedAt, + ) + .expr_as( + Expr::col((UserSessions::Table, UserSessions::UserAgent)), + SessionLookupIden::UserSessionUserAgent, + ) + .expr_as( + Expr::col((UserSessions::Table, UserSessions::LastActiveAt)), + SessionLookupIden::UserSessionLastActiveAt, + ) + .expr_as( + Expr::col((UserSessions::Table, UserSessions::LastActiveIp)), + SessionLookupIden::UserSessionLastActiveIp, + ) + .expr_as( + Expr::col((Users::Table, 
Users::UserId)), + SessionLookupIden::UserId, + ) + .expr_as( + Expr::col((Users::Table, Users::Username)), + SessionLookupIden::UserUsername, + ) + .expr_as( + Expr::col((Users::Table, Users::CreatedAt)), + SessionLookupIden::UserCreatedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::LockedAt)), + SessionLookupIden::UserLockedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::DeactivatedAt)), + SessionLookupIden::UserDeactivatedAt, + ) + .expr_as( + Expr::col((Users::Table, Users::CanRequestAdmin)), + SessionLookupIden::UserCanRequestAdmin, + ) + .expr_as( + Expr::col((Users::Table, Users::IsGuest)), + SessionLookupIden::UserIsGuest, + ) + .from(UserSessions::Table) + .inner_join( + Users::Table, + Expr::col((UserSessions::Table, UserSessions::UserId)) + .equals((Users::Table, Users::UserId)), + ) + .apply_filter(filter) + .generate_pagination( + (UserSessions::Table, UserSessions::UserSessionId), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination + .process(edges) + .try_map(BrowserSession::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.browser_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count(&mut self, filter: BrowserSessionFilter<'_>) -> Result { + let (sql, arguments) = sea_query::Query::select() + .expr(Expr::col((UserSessions::Table, UserSessions::UserSessionId)).count()) + .from(UserSessions::Table) + .apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } + + #[tracing::instrument( + name = "db.browser_session.authenticate_with_password", + skip_all, + fields( + db.query.text, + %user_session.id, + %user_password.id, + user_session_authentication.id, + ), + 
err, + )] + async fn authenticate_with_password( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + user_password: &Password, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record( + "user_session_authentication.id", + tracing::field::display(id), + ); + + sqlx::query!( + r#" + INSERT INTO user_session_authentications + (user_session_authentication_id, user_session_id, created_at, user_password_id) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(user_session.id), + created_at, + Uuid::from(user_password.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(Authentication { + id, + created_at, + authentication_method: AuthenticationMethod::Password { + user_password_id: user_password.id, + }, + }) + } + + #[tracing::instrument( + name = "db.browser_session.authenticate_with_upstream", + skip_all, + fields( + db.query.text, + %user_session.id, + %upstream_oauth_session.id, + user_session_authentication.id, + ), + err, + )] + async fn authenticate_with_upstream( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + upstream_oauth_session: &UpstreamOAuthAuthorizationSession, + ) -> Result { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record( + "user_session_authentication.id", + tracing::field::display(id), + ); + + sqlx::query!( + r#" + INSERT INTO user_session_authentications + (user_session_authentication_id, user_session_id, created_at, upstream_oauth_authorization_session_id) + VALUES ($1, $2, $3, $4) + "#, + Uuid::from(id), + Uuid::from(user_session.id), + created_at, + Uuid::from(upstream_oauth_session.id), + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(Authentication { + id, + created_at, + authentication_method: 
AuthenticationMethod::UpstreamOAuth2 { + upstream_oauth2_session_id: upstream_oauth_session.id, + }, + }) + } + + #[tracing::instrument( + name = "db.browser_session.get_last_authentication", + skip_all, + fields( + db.query.text, + %user_session.id, + ), + err, + )] + async fn get_last_authentication( + &mut self, + user_session: &BrowserSession, + ) -> Result, Self::Error> { + let authentication = sqlx::query_as!( + AuthenticationLookup, + r#" + SELECT user_session_authentication_id + , created_at + , user_password_id + , upstream_oauth_authorization_session_id + FROM user_session_authentications + WHERE user_session_id = $1 + ORDER BY created_at DESC + LIMIT 1 + "#, + Uuid::from(user_session.id), + ) + .traced() + .fetch_optional(&mut *self.conn) + .await?; + + let Some(authentication) = authentication else { + return Ok(None); + }; + + let authentication = Authentication::try_from(authentication)?; + Ok(Some(authentication)) + } + + #[tracing::instrument( + name = "db.browser_session.record_batch_activity", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn record_batch_activity( + &mut self, + mut activities: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error> { + // Sort the activity by ID, so that when batching the updates, Postgres + // locks the rows in a stable order, preventing deadlocks + activities.sort_unstable(); + let mut ids = Vec::with_capacity(activities.len()); + let mut last_activities = Vec::with_capacity(activities.len()); + let mut ips = Vec::with_capacity(activities.len()); + + for (id, last_activity, ip) in activities { + ids.push(Uuid::from(id)); + last_activities.push(last_activity); + ips.push(ip); + } + + let res = sqlx::query!( + r#" + UPDATE user_sessions + SET last_active_at = GREATEST(t.last_active_at, user_sessions.last_active_at) + , last_active_ip = COALESCE(t.last_active_ip, user_sessions.last_active_ip) + FROM ( + SELECT * + FROM UNNEST($1::uuid[], $2::timestamptz[], $3::inet[]) + AS 
t(user_session_id, last_active_at, last_active_ip) + ) AS t + WHERE user_sessions.user_session_id = t.user_session_id + "#, + &ids, + &last_activities, + &ips as &[Option], + ) + .traced() + .execute(&mut *self.conn) + .await?; + + DatabaseError::ensure_affected_rows(&res, ids.len().try_into().unwrap_or(u64::MAX))?; + + Ok(()) + } + + #[tracing::instrument( + name = "db.browser_session.cleanup_finished", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + until = %until, + limit = limit, + ), + err, + )] + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT user_session_id, finished_at + FROM user_sessions us + WHERE us.finished_at IS NOT NULL + AND ($1::timestamptz IS NULL OR us.finished_at >= $1) + AND us.finished_at < $2 + -- Only delete if no oauth2_sessions reference this user_session + AND NOT EXISTS ( + SELECT 1 FROM oauth2_sessions os + WHERE os.user_session_id = us.user_session_id + ) + -- Only delete if no compat_sessions reference this user_session + AND NOT EXISTS ( + SELECT 1 FROM compat_sessions cs + WHERE cs.user_session_id = us.user_session_id + ) + ORDER BY us.finished_at ASC + LIMIT $3 + FOR UPDATE OF us + ), + deleted_authentications AS ( + DELETE FROM user_session_authentications USING to_delete + WHERE user_session_authentications.user_session_id = to_delete.user_session_id + ), + deleted_sessions AS ( + DELETE FROM user_sessions USING to_delete + WHERE user_sessions.user_session_id = to_delete.user_session_id + RETURNING user_sessions.finished_at + ) + SELECT COUNT(*) as "count!", MAX(finished_at) as last_finished_at FROM deleted_sessions + "#, + since, + until, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_finished_at, + )) + } + + 
#[tracing::instrument( + name = "db.browser_session.cleanup_inactive_ips", + skip_all, + fields( + db.query.text, + since = since.map(tracing::field::display), + threshold = %threshold, + limit = limit, + ), + err, + )] + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error> { + let res = sqlx::query!( + r#" + WITH to_update AS ( + SELECT user_session_id, last_active_at + FROM user_sessions + WHERE last_active_ip IS NOT NULL + AND last_active_at IS NOT NULL + AND ($1::timestamptz IS NULL OR last_active_at >= $1) + AND last_active_at < $2 + ORDER BY last_active_at ASC + LIMIT $3 + FOR UPDATE + ), + updated AS ( + UPDATE user_sessions + SET last_active_ip = NULL + FROM to_update + WHERE user_sessions.user_session_id = to_update.user_session_id + RETURNING user_sessions.last_active_at + ) + SELECT COUNT(*) AS "count!", MAX(last_active_at) AS last_active_at FROM updated + "#, + since, + threshold, + i64::try_from(limit).unwrap_or(i64::MAX), + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_active_at, + )) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/terms.rs b/matrix-authentication-service/crates/storage-pg/src/user/terms.rs new file mode 100644 index 00000000..df83d937 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/terms.rs @@ -0,0 +1,74 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use mas_data_model::{Clock, User}; +use mas_storage::user::UserTermsRepository; +use rand::RngCore; +use sqlx::PgConnection; +use ulid::Ulid; +use url::Url; +use uuid::Uuid; + +use crate::{DatabaseError, tracing::ExecuteExt}; + +/// An implementation of [`UserTermsRepository`] for a PostgreSQL connection +pub struct PgUserTermsRepository<'c> { + conn: &'c mut PgConnection, +} + +impl<'c> PgUserTermsRepository<'c> { + /// Create a new [`PgUserTermsRepository`] from an active PostgreSQL + /// connection + pub fn new(conn: &'c mut PgConnection) -> Self { + Self { conn } + } +} + +#[async_trait] +impl UserTermsRepository for PgUserTermsRepository<'_> { + type Error = DatabaseError; + + #[tracing::instrument( + name = "db.user_terms.accept_terms", + skip_all, + fields( + db.query.text, + %user.id, + user_terms.id, + %user_terms.url = terms_url.as_str(), + ), + err, + )] + async fn accept_terms( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + terms_url: Url, + ) -> Result<(), Self::Error> { + let created_at = clock.now(); + let id = Ulid::from_datetime_with_source(created_at.into(), rng); + tracing::Span::current().record("user_terms.id", tracing::field::display(id)); + + sqlx::query!( + r#" + INSERT INTO user_terms (user_terms_id, user_id, terms_url, created_at) + VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id, terms_url) DO NOTHING + "#, + Uuid::from(id), + Uuid::from(user.id), + terms_url.as_str(), + created_at, + ) + .traced() + .execute(&mut *self.conn) + .await?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/storage-pg/src/user/tests.rs b/matrix-authentication-service/crates/storage-pg/src/user/tests.rs new file mode 100644 index 00000000..be106c56 --- /dev/null +++ b/matrix-authentication-service/crates/storage-pg/src/user/tests.rs @@ -0,0 +1,890 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::Duration; +use mas_data_model::{Clock, clock::MockClock}; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_storage::{ + Pagination, RepositoryAccess, + upstream_oauth2::{UpstreamOAuthProviderParams, UpstreamOAuthSessionFilter}, + user::{ + BrowserSessionFilter, BrowserSessionRepository, UserEmailFilter, UserEmailRepository, + UserFilter, UserPasswordRepository, UserRepository, + }, +}; +use oauth2_types::scope::{OPENID, Scope}; +use rand::SeedableRng; +use rand_chacha::ChaChaRng; +use sqlx::PgPool; + +use crate::PgRepository; + +/// Test the user repository, by adding and looking up a user +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_repo(pool: PgPool) { + const USERNAME: &str = "john"; + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let all = UserFilter::new(); + let admin = all.can_request_admin_only(); + let non_admin = all.cannot_request_admin_only(); + let active = all.active_only(); + let locked = all.locked_only(); + let deactivated = all.deactivated_only(); + + // Initially, the user shouldn't exist + assert!(!repo.user().exists(USERNAME).await.unwrap()); + assert!( + repo.user() + .find_by_username(USERNAME) + .await + .unwrap() + .is_none() + ); + + assert_eq!(repo.user().count(all).await.unwrap(), 0); + assert_eq!(repo.user().count(admin).await.unwrap(), 0); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 0); + assert_eq!(repo.user().count(active).await.unwrap(), 0); + assert_eq!(repo.user().count(locked).await.unwrap(), 0); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 0); + + // Adding the user should work + let user = repo + .user() + .add(&mut rng, &clock, USERNAME.to_owned()) 
+ .await + .unwrap(); + + // And now it should exist + assert!(repo.user().exists(USERNAME).await.unwrap()); + assert!( + repo.user() + .find_by_username(USERNAME) + .await + .unwrap() + .is_some() + ); + assert!(repo.user().lookup(user.id).await.unwrap().is_some()); + + assert_eq!(repo.user().count(all).await.unwrap(), 1); + assert_eq!(repo.user().count(admin).await.unwrap(), 0); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 1); + assert_eq!(repo.user().count(active).await.unwrap(), 1); + assert_eq!(repo.user().count(locked).await.unwrap(), 0); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 0); + + // Adding a second time should give a conflict + // It should not poison the transaction though + assert!( + repo.user() + .add(&mut rng, &clock, USERNAME.to_owned()) + .await + .is_err() + ); + + // Try locking a user + assert!(user.is_valid()); + let user = repo.user().lock(&clock, user).await.unwrap(); + assert!(!user.is_valid()); + + assert_eq!(repo.user().count(all).await.unwrap(), 1); + assert_eq!(repo.user().count(admin).await.unwrap(), 0); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 1); + assert_eq!(repo.user().count(active).await.unwrap(), 0); + assert_eq!(repo.user().count(locked).await.unwrap(), 1); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 0); + + // Check that the property is retrieved on lookup + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(!user.is_valid()); + + // Locking a second time should not fail + let user = repo.user().lock(&clock, user).await.unwrap(); + assert!(!user.is_valid()); + + // Try unlocking a user + let user = repo.user().unlock(user).await.unwrap(); + assert!(user.is_valid()); + + // Check that the property is retrieved on lookup + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(user.is_valid()); + + // Unlocking a second time should not fail + let user = repo.user().unlock(user).await.unwrap(); + assert!(user.is_valid()); + 
+ // Set the can_request_admin flag + let user = repo.user().set_can_request_admin(user, true).await.unwrap(); + assert!(user.can_request_admin); + + assert_eq!(repo.user().count(all).await.unwrap(), 1); + assert_eq!(repo.user().count(admin).await.unwrap(), 1); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 0); + assert_eq!(repo.user().count(active).await.unwrap(), 1); + assert_eq!(repo.user().count(locked).await.unwrap(), 0); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 0); + + // Check that the property is retrieved on lookup + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(user.can_request_admin); + + // Unset the can_request_admin flag + let user = repo + .user() + .set_can_request_admin(user, false) + .await + .unwrap(); + assert!(!user.can_request_admin); + + // Check that the property is retrieved on lookup + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(!user.can_request_admin); + + assert_eq!(repo.user().count(all).await.unwrap(), 1); + assert_eq!(repo.user().count(admin).await.unwrap(), 0); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 1); + assert_eq!(repo.user().count(active).await.unwrap(), 1); + assert_eq!(repo.user().count(locked).await.unwrap(), 0); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 0); + + // Deactivating the user should work + let user = repo.user().deactivate(&clock, user).await.unwrap(); + assert!(user.deactivated_at.is_some()); + + // Check that the property is retrieved on lookup + let user = repo.user().lookup(user.id).await.unwrap().unwrap(); + assert!(user.deactivated_at.is_some()); + + // Deactivating a second time should not fail + let user = repo.user().deactivate(&clock, user).await.unwrap(); + assert!(user.deactivated_at.is_some()); + + assert_eq!(repo.user().count(all).await.unwrap(), 1); + assert_eq!(repo.user().count(admin).await.unwrap(), 0); + assert_eq!(repo.user().count(non_admin).await.unwrap(), 1); + 
assert_eq!(repo.user().count(active).await.unwrap(), 0); + assert_eq!(repo.user().count(locked).await.unwrap(), 0); + assert_eq!(repo.user().count(deactivated).await.unwrap(), 1); + + // Test the search filter + assert_eq!( + repo.user() + .count(all.matching_search("alice")) + .await + .unwrap(), + 0 + ); + assert_eq!( + repo.user().count(all.matching_search("JO")).await.unwrap(), + 1 + ); + + // Check the list method + let list = repo.user().list(all, Pagination::first(10)).await.unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node.id, user.id); + + let list = repo + .user() + .list(admin, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(list.edges.len(), 0); + + let list = repo + .user() + .list(non_admin, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node.id, user.id); + + let list = repo + .user() + .list(active, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(list.edges.len(), 0); + + let list = repo + .user() + .list(locked, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(list.edges.len(), 0); + + let list = repo + .user() + .list(deactivated, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(list.edges.len(), 1); + assert_eq!(list.edges[0].node.id, user.id); + + repo.save().await.unwrap(); +} + +/// Test [`UserRepository::find_by_username`] with different casings. 
+#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_repo_find_by_username(pool: PgPool) { + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let alice = repo + .user() + .add(&mut rng, &clock, "Alice".to_owned()) + .await + .unwrap(); + let bob1 = repo + .user() + .add(&mut rng, &clock, "Bob".to_owned()) + .await + .unwrap(); + let bob2 = repo + .user() + .add(&mut rng, &clock, "BOB".to_owned()) + .await + .unwrap(); + + // This is fine, we can do a case-insensitive search + assert_eq!( + repo.user().find_by_username("alice").await.unwrap(), + Some(alice) + ); + + // In case there are multiple users with the same username, we should return the + // one that matches the exact casing + assert_eq!( + repo.user().find_by_username("Bob").await.unwrap(), + Some(bob1) + ); + assert_eq!( + repo.user().find_by_username("BOB").await.unwrap(), + Some(bob2) + ); + + // If none match, we should return None + assert!(repo.user().find_by_username("bob").await.unwrap().is_none()); +} + +/// Test the user email repository, by trying out most of its methods +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_email_repo(pool: PgPool) { + const USERNAME: &str = "john"; + const EMAIL: &str = "john@example.com"; + // This is what is stored in the database, making sure that: + // 1. we don't normalize the email address when storing it + // 2. 
looking it up is case-incensitive + const UPPERCASE_EMAIL: &str = "JOHN@EXAMPLE.COM"; + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let user = repo + .user() + .add(&mut rng, &clock, USERNAME.to_owned()) + .await + .unwrap(); + + // The user email should not exist yet + assert!( + repo.user_email() + .find(&user, EMAIL) + .await + .unwrap() + .is_none() + ); + + let all = UserEmailFilter::new().for_user(&user); + + // Check the counts + assert_eq!(repo.user_email().count(all).await.unwrap(), 0); + + let user_email = repo + .user_email() + .add(&mut rng, &clock, &user, UPPERCASE_EMAIL.to_owned()) + .await + .unwrap(); + + assert_eq!(user_email.user_id, user.id); + assert_eq!(user_email.email, UPPERCASE_EMAIL); + + // Check the counts + assert_eq!(repo.user_email().count(all).await.unwrap(), 1); + + assert!( + repo.user_email() + .find(&user, EMAIL) + .await + .unwrap() + .is_some() + ); + + let user_email = repo + .user_email() + .lookup(user_email.id) + .await + .unwrap() + .expect("user email was not found"); + + assert_eq!(user_email.user_id, user.id); + assert_eq!(user_email.email, UPPERCASE_EMAIL); + + // Listing the user emails should work + let emails = repo + .user_email() + .list(all, Pagination::first(10)) + .await + .unwrap(); + assert!(!emails.has_next_page); + assert_eq!(emails.edges.len(), 1); + assert_eq!(emails.edges[0].node, user_email); + + // Listing emails from the email address should work + let emails = repo + .user_email() + .list(all.for_email(EMAIL), Pagination::first(10)) + .await + .unwrap(); + assert!(!emails.has_next_page); + assert_eq!(emails.edges.len(), 1); + assert_eq!(emails.edges[0].node, user_email); + + // Filtering on another email should not return anything + let emails = repo + .user_email() + .list(all.for_email("hello@example.com"), Pagination::first(10)) + .await + .unwrap(); + assert!(!emails.has_next_page); + 
assert!(emails.edges.is_empty()); + + // Counting also works with the email filter + assert_eq!( + repo.user_email().count(all.for_email(EMAIL)).await.unwrap(), + 1 + ); + assert_eq!( + repo.user_email() + .count(all.for_email("hello@example.com")) + .await + .unwrap(), + 0 + ); + + // Deleting the user email should work + repo.user_email().remove(user_email).await.unwrap(); + assert_eq!(repo.user_email().count(all).await.unwrap(), 0); + + // Add a few emails + for i in 0..5 { + let email = format!("email{i}@example.com"); + repo.user_email() + .add(&mut rng, &clock, &user, email) + .await + .unwrap(); + } + assert_eq!(repo.user_email().count(all).await.unwrap(), 5); + + // Try removing all the emails + let affected = repo.user_email().remove_bulk(all).await.unwrap(); + assert_eq!(affected, 5); + assert_eq!(repo.user_email().count(all).await.unwrap(), 0); + + repo.save().await.unwrap(); +} + +/// Test the authentication codes methods in the user email repository +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_email_repo_authentications(pool: PgPool) { + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + // Create a user and a user session so that we can create an authentication + let user = repo + .user() + .add(&mut rng, &clock, "alice".to_owned()) + .await + .unwrap(); + + let browser_session = repo + .browser_session() + .add(&mut rng, &clock, &user, None) + .await + .unwrap(); + + // Create an authentication session + let authentication = repo + .user_email() + .add_authentication_for_session( + &mut rng, + &clock, + "alice@example.com".to_owned(), + &browser_session, + ) + .await + .unwrap(); + + assert_eq!(authentication.email, "alice@example.com"); + assert_eq!(authentication.user_session_id, Some(browser_session.id)); + assert_eq!(authentication.created_at, clock.now()); + assert_eq!(authentication.completed_at, None); + + // Check that 
we can find the authentication by its ID + let lookup = repo + .user_email() + .lookup_authentication(authentication.id) + .await + .unwrap() + .unwrap(); + assert_eq!(lookup.id, authentication.id); + assert_eq!(lookup.email, "alice@example.com"); + assert_eq!(lookup.user_session_id, Some(browser_session.id)); + assert_eq!(lookup.created_at, clock.now()); + assert_eq!(lookup.completed_at, None); + + // Add a code to the session + let code = repo + .user_email() + .add_authentication_code( + &mut rng, + &clock, + Duration::minutes(5), + &authentication, + "123456".to_owned(), + ) + .await + .unwrap(); + + assert_eq!(code.code, "123456"); + assert_eq!(code.created_at, clock.now()); + assert_eq!(code.expires_at, clock.now() + Duration::minutes(5)); + + // Check that we can find the code by its ID + let id = code.id; + let lookup = repo + .user_email() + .find_authentication_code(&authentication, "123456") + .await + .unwrap() + .unwrap(); + + assert_eq!(lookup.id, id); + assert_eq!(lookup.code, "123456"); + assert_eq!(lookup.created_at, clock.now()); + assert_eq!(lookup.expires_at, clock.now() + Duration::minutes(5)); + + // Complete the authentication + let authentication = repo + .user_email() + .complete_authentication_with_code(&clock, authentication, &code) + .await + .unwrap(); + + assert_eq!(authentication.id, authentication.id); + assert_eq!(authentication.email, "alice@example.com"); + assert_eq!(authentication.user_session_id, Some(browser_session.id)); + assert_eq!(authentication.created_at, clock.now()); + assert_eq!(authentication.completed_at, Some(clock.now())); + + // Check that we can find the completed authentication by its ID + let lookup = repo + .user_email() + .lookup_authentication(authentication.id) + .await + .unwrap() + .unwrap(); + assert_eq!(lookup.id, authentication.id); + assert_eq!(lookup.email, "alice@example.com"); + assert_eq!(lookup.user_session_id, Some(browser_session.id)); + assert_eq!(lookup.created_at, clock.now()); + 
assert_eq!(lookup.completed_at, Some(clock.now())); + + // Completing a second time should fail + let res = repo + .user_email() + .complete_authentication_with_code(&clock, authentication, &code) + .await; + assert!(res.is_err()); +} + +/// Test the user password repository implementation. +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_password_repo(pool: PgPool) { + const USERNAME: &str = "john"; + const FIRST_PASSWORD_HASH: &str = "doesntmatter"; + const SECOND_PASSWORD_HASH: &str = "alsodoesntmatter"; + + let mut repo = PgRepository::from_pool(&pool).await.unwrap().boxed(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let user = repo + .user() + .add(&mut rng, &clock, USERNAME.to_owned()) + .await + .unwrap(); + + // User should have no active password + assert!(repo.user_password().active(&user).await.unwrap().is_none()); + + // Insert a first password + let first_password = repo + .user_password() + .add( + &mut rng, + &clock, + &user, + 1, + FIRST_PASSWORD_HASH.to_owned(), + None, + ) + .await + .unwrap(); + + // User should now have an active password + let first_password_lookup = repo + .user_password() + .active(&user) + .await + .unwrap() + .expect("user should have an active password"); + + assert_eq!(first_password.id, first_password_lookup.id); + assert_eq!(first_password_lookup.hashed_password, FIRST_PASSWORD_HASH); + assert_eq!(first_password_lookup.version, 1); + assert_eq!(first_password_lookup.upgraded_from_id, None); + + // Getting the last inserted password is based on the clock, so we need to + // advance it + clock.advance(Duration::microseconds(10 * 1000 * 1000)); + + let second_password = repo + .user_password() + .add( + &mut rng, + &clock, + &user, + 2, + SECOND_PASSWORD_HASH.to_owned(), + Some(&first_password), + ) + .await + .unwrap(); + + // User should now have an active password + let second_password_lookup = repo + .user_password() + .active(&user) + .await + .unwrap() + 
.expect("user should have an active password"); + + assert_eq!(second_password.id, second_password_lookup.id); + assert_eq!(second_password_lookup.hashed_password, SECOND_PASSWORD_HASH); + assert_eq!(second_password_lookup.version, 2); + assert_eq!( + second_password_lookup.upgraded_from_id, + Some(first_password.id) + ); + + repo.save().await.unwrap(); +} + +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_session(pool: PgPool) { + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let alice = repo + .user() + .add(&mut rng, &clock, "alice".to_owned()) + .await + .unwrap(); + + let bob = repo + .user() + .add(&mut rng, &clock, "bob".to_owned()) + .await + .unwrap(); + + let all = BrowserSessionFilter::default(); + let active = all.active_only(); + let finished = all.finished_only(); + + assert_eq!(repo.browser_session().count(all).await.unwrap(), 0); + assert_eq!(repo.browser_session().count(active).await.unwrap(), 0); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 0); + + let session = repo + .browser_session() + .add(&mut rng, &clock, &alice, None) + .await + .unwrap(); + assert_eq!(session.user.id, alice.id); + assert!(session.finished_at.is_none()); + + assert_eq!(repo.browser_session().count(all).await.unwrap(), 1); + assert_eq!(repo.browser_session().count(active).await.unwrap(), 1); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 0); + + // The session should be in the list of active sessions + let session_list = repo + .browser_session() + .list(active, Pagination::first(10)) + .await + .unwrap(); + assert!(!session_list.has_next_page); + assert_eq!(session_list.edges.len(), 1); + assert_eq!(session_list.edges[0].node, session); + + let session_lookup = repo + .browser_session() + .lookup(session.id) + .await + .unwrap() + .expect("user session not found"); + + assert_eq!(session_lookup.id, session.id); + 
assert_eq!(session_lookup.user.id, alice.id); + assert!(session_lookup.finished_at.is_none()); + + // Finish the session + repo.browser_session() + .finish(&clock, session_lookup) + .await + .unwrap(); + + // The active session counter should be 0, and the finished one should be 1 + assert_eq!(repo.browser_session().count(all).await.unwrap(), 1); + assert_eq!(repo.browser_session().count(active).await.unwrap(), 0); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 1); + + // The session should not be in the list of active sessions anymore + let session_list = repo + .browser_session() + .list(active, Pagination::first(10)) + .await + .unwrap(); + assert!(!session_list.has_next_page); + assert!(session_list.edges.is_empty()); + + // Reload the session + let session_lookup = repo + .browser_session() + .lookup(session.id) + .await + .unwrap() + .expect("user session not found"); + + assert_eq!(session_lookup.id, session.id); + assert_eq!(session_lookup.user.id, alice.id); + // This time the session is finished + assert!(session_lookup.finished_at.is_some()); + + // Create a bunch of other sessions + for _ in 0..5 { + for user in &[&alice, &bob] { + repo.browser_session() + .add(&mut rng, &clock, user, None) + .await + .unwrap(); + } + } + + let all_alice = BrowserSessionFilter::new().for_user(&alice); + let active_alice = BrowserSessionFilter::new().for_user(&alice).active_only(); + let all_bob = BrowserSessionFilter::new().for_user(&bob); + let active_bob = BrowserSessionFilter::new().for_user(&bob).active_only(); + assert_eq!(repo.browser_session().count(all).await.unwrap(), 11); + assert_eq!(repo.browser_session().count(active).await.unwrap(), 10); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 1); + assert_eq!(repo.browser_session().count(all_alice).await.unwrap(), 6); + assert_eq!(repo.browser_session().count(active_alice).await.unwrap(), 5); + assert_eq!(repo.browser_session().count(all_bob).await.unwrap(), 5); + 
assert_eq!(repo.browser_session().count(active_bob).await.unwrap(), 5); + + // Finish all the sessions for alice + let affected = repo + .browser_session() + .finish_bulk(&clock, active_alice) + .await + .unwrap(); + assert_eq!(affected, 5); + assert_eq!(repo.browser_session().count(all_alice).await.unwrap(), 6); + assert_eq!(repo.browser_session().count(active_alice).await.unwrap(), 0); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 6); + + // Finish all the sessions for bob + let affected = repo + .browser_session() + .finish_bulk(&clock, active_bob) + .await + .unwrap(); + assert_eq!(affected, 5); + assert_eq!(repo.browser_session().count(all_bob).await.unwrap(), 5); + assert_eq!(repo.browser_session().count(active_bob).await.unwrap(), 0); + assert_eq!(repo.browser_session().count(finished).await.unwrap(), 11); + + // Checking the 'authenticated by upstream sessions' filter + // We need a provider + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: None, + human_name: None, + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports: mas_data_model::UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Disabled, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Disabled, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: + 
mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + // Start an authorization session + let upstream_oauth_session = repo + .upstream_oauth_session() + .add(&mut rng, &clock, &provider, "state".to_owned(), None, None) + .await + .unwrap(); + + // Start a browser session + let session = repo + .browser_session() + .add(&mut rng, &clock, &alice, None) + .await + .unwrap(); + + // Make the browser session from alice authenticated by this upstream session + repo.browser_session() + .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_oauth_session) + .await + .unwrap(); + + // This will match all authorization sessions, which matches exactly that one + // authorization session + let upstream_oauth_session_filter = UpstreamOAuthSessionFilter::new(); + let filter = + BrowserSessionFilter::new().linked_to_upstream_sessions_only(upstream_oauth_session_filter); + + // Now try to look it up + let page = repo + .browser_session() + .list(filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + assert_eq!(page.edges[0].node.id, session.id); + + // Try counting + assert_eq!(repo.browser_session().count(filter).await.unwrap(), 1); + + // Try finishing the session + let affected = repo + .browser_session() + .finish_bulk(&clock, filter) + .await + .unwrap(); + assert_eq!(affected, 1); + + // Lookup the session by its ID + let lookup = repo + .browser_session() + .lookup(session.id) + .await + .unwrap() + .expect("session to be found in the database"); + // It should be finished + assert!(lookup.finished_at.is_some()); +} + +#[sqlx::test(migrator = "crate::MIGRATOR")] +async fn test_user_terms(pool: PgPool) { + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + let mut rng = ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + + let user = repo + .user() + .add(&mut rng, &clock, "john".to_owned()) + .await + .unwrap(); + + // Accepting the terms should work + 
repo.user_terms() + .accept_terms( + &mut rng, + &clock, + &user, + "https://example.com/terms".parse().unwrap(), + ) + .await + .unwrap(); + + // Accepting a second time should also work + repo.user_terms() + .accept_terms( + &mut rng, + &clock, + &user, + "https://example.com/terms".parse().unwrap(), + ) + .await + .unwrap(); + + // Accepting a different terms should also work + repo.user_terms() + .accept_terms( + &mut rng, + &clock, + &user, + "https://example.com/terms?v=2".parse().unwrap(), + ) + .await + .unwrap(); + + let mut conn = repo.into_inner(); + + // We should have two rows, as the first terms was deduped + let res: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM user_terms") + .fetch_one(&mut *conn) + .await + .unwrap(); + assert_eq!(res, 2); +} diff --git a/matrix-authentication-service/crates/storage/Cargo.toml b/matrix-authentication-service/crates/storage/Cargo.toml new file mode 100644 index 00000000..07f4330c --- /dev/null +++ b/matrix-authentication-service/crates/storage/Cargo.toml @@ -0,0 +1,36 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-storage" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +async-trait.workspace = true +chrono.workspace = true +futures-util.workspace = true +opentelemetry.workspace = true +rand_core.workspace = true +serde_json.workspace = true +serde.workspace = true +thiserror.workspace = true +tracing-opentelemetry.workspace = true +tracing.workspace = true +ulid.workspace = true +url.workspace = true + +oauth2-types.workspace = true +mas-data-model.workspace = true +mas-iana.workspace = true +mas-jose.workspace = true diff --git a/matrix-authentication-service/crates/storage/src/app_session.rs b/matrix-authentication-service/crates/storage/src/app_session.rs new file mode 100644 index 00000000..4c0b7703 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/app_session.rs @@ -0,0 +1,224 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Repositories to interact with all kinds of sessions + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Clock, CompatSession, Device, Session, User}; + +use crate::{Page, Pagination, repository_impl}; + +/// The state of a session +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AppSessionState { + /// The session is active + Active, + /// The session is finished + Finished, +} + +impl AppSessionState { + /// Returns [`true`] if we're looking for active sessions + #[must_use] + pub fn is_active(self) -> bool { + matches!(self, Self::Active) + } + + /// Returns [`true`] if we're looking for finished sessions + #[must_use] + pub fn is_finished(self) -> bool { + matches!(self, Self::Finished) + } +} + +/// An [`AppSession`] is either a [`CompatSession`] or an OAuth 2.0 [`Session`] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum AppSession { + /// A compatibility layer session + Compat(Box), + + /// An OAuth 2.0 session + OAuth2(Box), +} + +/// Filtering parameters for application sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct AppSessionFilter<'a> { + user: Option<&'a User>, + browser_session: Option<&'a BrowserSession>, + state: Option, + device_id: Option<&'a Device>, + last_active_before: Option>, + last_active_after: Option>, +} + +impl<'a> AppSessionFilter<'a> { + /// Create a new [`AppSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the user who owns the sessions + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + #[must_use] + pub fn user(&self) -> Option<&'a User> { + self.user + } + + /// Set the browser session filter + #[must_use] + pub fn for_browser_session(mut self, browser_session: &'a BrowserSession) -> Self { + self.browser_session = Some(browser_session); + self + } + + /// Get the browser session filter + #[must_use] + pub 
fn browser_session(&self) -> Option<&'a BrowserSession> { + self.browser_session + } + + /// Set the device ID filter + #[must_use] + pub fn for_device(mut self, device_id: &'a Device) -> Self { + self.device_id = Some(device_id); + self + } + + /// Get the device ID filter + #[must_use] + pub fn device(&self) -> Option<&'a Device> { + self.device_id + } + + /// Only return sessions with a last active time before the given time + #[must_use] + pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { + self.last_active_before = Some(last_active_before); + self + } + + /// Only return sessions with a last active time after the given time + #[must_use] + pub fn with_last_active_after(mut self, last_active_after: DateTime) -> Self { + self.last_active_after = Some(last_active_after); + self + } + + /// Get the last active before filter + /// + /// Returns [`None`] if no last-active-before filter was set + #[must_use] + pub fn last_active_before(&self) -> Option> { + self.last_active_before + } + + /// Get the last active after filter + /// + /// Returns [`None`] if no last-active-after filter was set + #[must_use] + pub fn last_active_after(&self) -> Option> { + self.last_active_after + } + + /// Only return active application sessions + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(AppSessionState::Active); + self + } + + /// Only return finished application sessions + #[must_use] + pub fn finished_only(mut self) -> Self { + self.state = Some(AppSessionState::Finished); + self + } + + /// Get the state filter + #[must_use] + pub fn state(&self) -> Option { + self.state + } +} + +/// An [`AppSessionRepository`] helps interacting with both [`CompatSession`] and +/// OAuth 2.0 [`Session`] at the same time saved in the storage backend +#[async_trait] +pub trait AppSessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// List [`AppSession`] with the given filter and pagination + /// + /// Returns 
a page of [`AppSession`] matching the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: AppSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`AppSession`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: AppSessionFilter<'_>) -> Result; + + /// Finishes any application sessions that are using the specified device's + /// ID. + /// + /// This is intended for logging in using an existing device ID (i.e. + /// replacing a device). + /// + /// Should be called *before* creating a new session for the device. + /// + /// Returns true if a session was finished. + async fn finish_sessions_to_replace_device( + &mut self, + clock: &dyn Clock, + user: &User, + device: &Device, + ) -> Result; +} + +repository_impl!(AppSessionRepository: + async fn list( + &mut self, + filter: AppSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: AppSessionFilter<'_>) -> Result; + + async fn finish_sessions_to_replace_device( + &mut self, + clock: &dyn Clock, + user: &User, + device: &Device, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/compat/access_token.rs b/matrix-authentication-service/crates/storage/src/compat/access_token.rs new file mode 100644 index 00000000..8dd41ba4 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/compat/access_token.rs @@ -0,0 +1,113 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::Duration; +use mas_data_model::{Clock, CompatAccessToken, CompatSession}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// A [`CompatAccessTokenRepository`] helps interacting with +/// [`CompatAccessToken`] saved in the storage backend +#[async_trait] +pub trait CompatAccessTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a compat access token by its ID + /// + /// Returns the compat access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the compat access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find a compat access token by its token + /// + /// Returns the compat access token if found, `None` otherwise + /// + /// # Parameters + /// + /// * `access_token`: The token of the compat access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + /// Add a new compat access token to the database + /// + /// Returns the newly created compat access token + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `compat_session`: The compat session associated with the access token + /// * `token`: The token of the access token + /// * `expires_after`: The duration after which the access token expires, if + /// specified + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: 
&dyn Clock, + compat_session: &CompatSession, + token: String, + expires_after: Option, + ) -> Result; + + /// Set the expiration time of the compat access token to now + /// + /// Returns the expired compat access token + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `compat_access_token`: The compat access token to expire + async fn expire( + &mut self, + clock: &dyn Clock, + compat_access_token: CompatAccessToken, + ) -> Result; +} + +repository_impl!(CompatAccessTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + compat_session: &CompatSession, + token: String, + expires_after: Option, + ) -> Result; + + async fn expire( + &mut self, + clock: &dyn Clock, + compat_access_token: CompatAccessToken, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/compat/mod.rs b/matrix-authentication-service/crates/storage/src/compat/mod.rs new file mode 100644 index 00000000..8c518d42 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/compat/mod.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Repositories to interact with entities of the compatibility layer + +mod access_token; +mod refresh_token; +mod session; +mod sso_login; + +pub use self::{ + access_token::CompatAccessTokenRepository, + refresh_token::CompatRefreshTokenRepository, + session::{CompatSessionFilter, CompatSessionRepository}, + sso_login::{CompatSsoLoginFilter, CompatSsoLoginRepository}, +}; diff --git a/matrix-authentication-service/crates/storage/src/compat/refresh_token.rs b/matrix-authentication-service/crates/storage/src/compat/refresh_token.rs new file mode 100644 index 00000000..d4434569 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/compat/refresh_token.rs @@ -0,0 +1,130 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{Clock, CompatAccessToken, CompatRefreshToken, CompatSession}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// A [`CompatRefreshTokenRepository`] helps interacting with +/// [`CompatRefreshToken`] saved in the storage backend +#[async_trait] +pub trait CompatRefreshTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a compat refresh token by its ID + /// + /// Returns the compat refresh token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the compat refresh token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find a compat refresh token by its token + /// + /// Returns the compat refresh token if found, `None` otherwise + /// + /// # Parameters + /// + /// * `refresh_token`: The token of the compat refresh token to lookup + /// + /// # 
Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error>; + + /// Add a new compat refresh token to the database + /// + /// Returns the newly created compat refresh token + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `compat_session`: The compat session associated with this refresh + /// token + /// * `compat_access_token`: The compat access token created alongside this + /// refresh token + /// * `token`: The token of the refresh token + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + compat_session: &CompatSession, + compat_access_token: &CompatAccessToken, + token: String, + ) -> Result; + + /// Consume the given compat refresh token, as well as all other refresh + /// tokens from the same session, except for the given successor compat + /// refresh token. + /// + /// The given successor refresh token will thereafter be the only valid + /// refresh token for the session. + /// + /// # Historical context + /// + /// When using a refresh token, we must be able to mark multiple other + /// refresh tokens in the same session as consumed. + /// This is desirable because the syn2mas migration process can import + /// multiple refresh tokens for one device (compat session). + /// But once the user uses one of those, the others should no longer + /// be valid. + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `compat_refresh_token`: The compat refresh token to consume + /// + /// # Errors + /// + /// - Returns [`Self::Error`] if the underlying repository fails + /// - Returns an error if `compat_refresh_token` is not valid to be + /// consumed. + /// - Returns an error if no refresh tokens would be consumed. 
+ async fn consume_and_replace( + &mut self, + clock: &dyn Clock, + compat_refresh_token: CompatRefreshToken, + successor_compat_refresh_token: &CompatRefreshToken, + ) -> Result; +} + +repository_impl!(CompatRefreshTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + compat_session: &CompatSession, + compat_access_token: &CompatAccessToken, + token: String, + ) -> Result; + + async fn consume_and_replace( + &mut self, + clock: &dyn Clock, + compat_refresh_token: CompatRefreshToken, + successor_compat_refresh_token: &CompatRefreshToken, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/compat/session.rs b/matrix-authentication-service/crates/storage/src/compat/session.rs new file mode 100644 index 00000000..8ea08d8f --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/compat/session.rs @@ -0,0 +1,479 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Clock, CompatSession, CompatSsoLogin, Device, User}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Page, Pagination, repository_impl, user::BrowserSessionFilter}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CompatSessionState { + Active, + Finished, +} + +impl CompatSessionState { + /// Returns [`true`] if we're looking for active sessions + #[must_use] + pub fn is_active(self) -> bool { + matches!(self, Self::Active) + } + + /// Returns [`true`] if we're looking for finished sessions + #[must_use] + pub fn is_finished(self) -> bool { + matches!(self, Self::Finished) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CompatSessionType { + SsoLogin, + Unknown, +} + +impl CompatSessionType { + /// Returns [`true`] if we're looking for SSO logins + #[must_use] + pub fn is_sso_login(self) -> bool { + matches!(self, Self::SsoLogin) + } + + /// Returns [`true`] if we're looking for unknown sessions + #[must_use] + pub fn is_unknown(self) -> bool { + matches!(self, Self::Unknown) + } +} + +/// Filter parameters for listing compatibility sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct CompatSessionFilter<'a> { + user: Option<&'a User>, + browser_session: Option<&'a BrowserSession>, + browser_session_filter: Option>, + state: Option, + auth_type: Option, + device: Option<&'a Device>, + last_active_before: Option>, + last_active_after: Option>, +} + +impl<'a> CompatSessionFilter<'a> { + /// Create a new [`CompatSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the user who owns the compatibility sessions + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + #[must_use] + pub fn user(&self) -> Option<&'a User> { + self.user + } + + /// Set the 
device filter + #[must_use] + pub fn for_device(mut self, device: &'a Device) -> Self { + self.device = Some(device); + self + } + + /// Get the device filter + #[must_use] + pub fn device(&self) -> Option<&'a Device> { + self.device + } + + /// Set the browser session filter + #[must_use] + pub fn for_browser_session(mut self, browser_session: &'a BrowserSession) -> Self { + self.browser_session = Some(browser_session); + self + } + + /// Set the browser sessions filter + #[must_use] + pub fn for_browser_sessions( + mut self, + browser_session_filter: BrowserSessionFilter<'a>, + ) -> Self { + self.browser_session_filter = Some(browser_session_filter); + self + } + + /// Get the browser session filter + #[must_use] + pub fn browser_session(&self) -> Option<&'a BrowserSession> { + self.browser_session + } + + /// Get the browser sessions filter + #[must_use] + pub fn browser_session_filter(&self) -> Option> { + self.browser_session_filter + } + + /// Only return sessions with a last active time before the given time + #[must_use] + pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { + self.last_active_before = Some(last_active_before); + self + } + + /// Only return sessions with a last active time after the given time + #[must_use] + pub fn with_last_active_after(mut self, last_active_after: DateTime) -> Self { + self.last_active_after = Some(last_active_after); + self + } + + /// Get the last active before filter + /// + /// Returns [`None`] if no last-active-before filter was set + #[must_use] + pub fn last_active_before(&self) -> Option> { + self.last_active_before + } + + /// Get the last active after filter + /// + /// Returns [`None`] if no last-active-after filter was set + #[must_use] + pub fn last_active_after(&self) -> Option> { + self.last_active_after + } + + /// Only return active compatibility sessions + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(CompatSessionState::Active); + self + } + + /// Only return finished 
compatibility sessions + #[must_use] + pub fn finished_only(mut self) -> Self { + self.state = Some(CompatSessionState::Finished); + self + } + + /// Get the state filter + #[must_use] + pub fn state(&self) -> Option { + self.state + } + + /// Only return SSO login compatibility sessions + #[must_use] + pub fn sso_login_only(mut self) -> Self { + self.auth_type = Some(CompatSessionType::SsoLogin); + self + } + + /// Only return unknown compatibility sessions + #[must_use] + pub fn unknown_only(mut self) -> Self { + self.auth_type = Some(CompatSessionType::Unknown); + self + } + + /// Get the auth type filter + #[must_use] + pub fn auth_type(&self) -> Option { + self.auth_type + } +} + +/// A [`CompatSessionRepository`] helps interacting with +/// [`CompatSession`] saved in the storage backend +#[async_trait] +pub trait CompatSessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a compat session by its ID + /// + /// Returns the compat session if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the compat session to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Start a new compat session + /// + /// Returns the newly created compat session + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `user`: The user to create the compat session for + /// * `device`: The device ID of this session + /// * `browser_session`: The browser session which created this session + /// * `is_synapse_admin`: Whether the session is a synapse admin session + /// * `human_name`: The human-readable name of the session provided by the + /// client or the user + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + #[expect(clippy::too_many_arguments)] + 
async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + device: Device, + browser_session: Option<&BrowserSession>, + is_synapse_admin: bool, + human_name: Option, + ) -> Result; + + /// End a compat session + /// + /// Returns the ended compat session + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `compat_session`: The compat session to end + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish( + &mut self, + clock: &dyn Clock, + compat_session: CompatSession, + ) -> Result; + + /// Mark all the [`CompatSession`] matching the given filter as finished + /// + /// Returns the number of sessions affected + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: CompatSessionFilter<'_>, + ) -> Result; + + /// List [`CompatSession`] with the given filter and pagination + /// + /// Returns a page of compat sessions, with the associated SSO logins if any + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: CompatSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, Self::Error>; + + /// Count the number of [`CompatSession`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: CompatSessionFilter<'_>) -> Result; + + /// Record a batch of [`CompatSession`] activity + /// + /// # Parameters + /// + /// * `activity`: A list of tuples containing 
the session ID, the last + /// activity timestamp and the IP address of the client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + /// Record the user agent of a compat session + /// + /// # Parameters + /// + /// * `compat_session`: The compat session to record the user agent for + /// * `user_agent`: The user agent to record + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn record_user_agent( + &mut self, + compat_session: CompatSession, + user_agent: String, + ) -> Result; + + /// Set the human name of a compat session + /// + /// # Parameters + /// + /// * `compat_session`: The compat session to set the human name for + /// * `human_name`: The human name to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn set_human_name( + &mut self, + compat_session: CompatSession, + human_name: Option, + ) -> Result; + + /// Cleanup finished [`CompatSession`]s and their associated tokens. + /// + /// This deletes compat sessions that have been finished, along with their + /// associated access tokens, refresh tokens, and SSO logins. + /// + /// Returns the number of sessions deleted and the timestamp of the last + /// deleted session's `finished_at`, which can be used for pagination. 
+ /// + /// # Parameters + /// + /// * `since`: Only delete sessions finished at or after this timestamp + /// * `until`: Only delete sessions finished before this timestamp + /// * `limit`: Maximum number of sessions to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + /// Clear IP addresses from sessions inactive since the threshold + /// + /// Sets `last_active_ip` to `NULL` for sessions where `last_active_at` is + /// before the threshold. Returns the number of sessions affected and the + /// last `last_active_at` timestamp processed for pagination. + /// + /// # Parameters + /// + /// * `since`: Only process sessions with `last_active_at` at or after this + /// timestamp (exclusive). If `None`, starts from the beginning. + /// * `threshold`: Clear IPs for sessions with `last_active_at` before this + /// time + /// * `limit`: Maximum number of sessions to update in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +} + +repository_impl!(CompatSessionRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + device: Device, + browser_session: Option<&BrowserSession>, + is_synapse_admin: bool, + human_name: Option, + ) -> Result; + + async fn finish( + &mut self, + clock: &dyn Clock, + compat_session: CompatSession, + ) -> Result; + + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: CompatSessionFilter<'_>, + ) -> Result; + + async fn list( + &mut self, + filter: CompatSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, 
Self::Error>; + + async fn count(&mut self, filter: CompatSessionFilter<'_>) -> Result; + + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + async fn record_user_agent( + &mut self, + compat_session: CompatSession, + user_agent: String, + ) -> Result; + + async fn set_human_name( + &mut self, + compat_session: CompatSession, + human_name: Option, + ) -> Result; + + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/compat/sso_login.rs b/matrix-authentication-service/crates/storage/src/compat/sso_login.rs new file mode 100644 index 00000000..69de5294 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/compat/sso_login.rs @@ -0,0 +1,285 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use mas_data_model::{BrowserSession, Clock, CompatSession, CompatSsoLogin, User}; +use rand_core::RngCore; +use ulid::Ulid; +use url::Url; + +use crate::{Pagination, pagination::Page, repository_impl}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CompatSsoLoginState { + Pending, + Fulfilled, + Exchanged, +} + +impl CompatSsoLoginState { + /// Returns [`true`] if we're looking for pending SSO logins + #[must_use] + pub fn is_pending(self) -> bool { + matches!(self, Self::Pending) + } + + /// Returns [`true`] if we're looking for fulfilled SSO logins + #[must_use] + pub fn is_fulfilled(self) -> bool { + matches!(self, Self::Fulfilled) + } + + /// Returns [`true`] if we're looking for exchanged SSO logins + #[must_use] + pub fn is_exchanged(self) -> bool { + matches!(self, Self::Exchanged) + } +} + +/// Filter parameters for listing compat SSO logins +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct CompatSsoLoginFilter<'a> { + user: Option<&'a User>, + state: Option, +} + +impl<'a> CompatSsoLoginFilter<'a> { + /// Create a new empty filter + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the user who owns the SSO logins sessions + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + #[must_use] + pub fn user(&self) -> Option<&User> { + self.user + } + + /// Only return pending SSO logins + #[must_use] + pub fn pending_only(mut self) -> Self { + self.state = Some(CompatSsoLoginState::Pending); + self + } + + /// Only return fulfilled SSO logins + #[must_use] + pub fn fulfilled_only(mut self) -> Self { + self.state = Some(CompatSsoLoginState::Fulfilled); + self + } + + /// Only return exchanged SSO logins + #[must_use] + pub fn exchanged_only(mut self) -> Self { + self.state = Some(CompatSsoLoginState::Exchanged); + self + } + + /// Get the state filter + #[must_use] + pub fn state(&self) -> Option { + 
self.state + } +} + +/// A [`CompatSsoLoginRepository`] helps interacting with +/// [`CompatSsoLoginRepository`] saved in the storage backend +#[async_trait] +pub trait CompatSsoLoginRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a compat SSO login by its ID + /// + /// Returns the compat SSO login if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the compat SSO login to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find a compat SSO login by its session + /// + /// Returns the compat SSO login if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `session`: The session of the compat SSO login to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_for_session( + &mut self, + session: &CompatSession, + ) -> Result, Self::Error>; + + /// Find a compat SSO login by its login token + /// + /// Returns the compat SSO login if found, `None` otherwise + /// + /// # Parameters + /// + /// * `login_token`: The login token of the compat SSO login to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + login_token: &str, + ) -> Result, Self::Error>; + + /// Start a new compat SSO login token + /// + /// Returns the newly created compat SSO login + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate the timestamps + /// * `login_token`: The login token given to the client + /// * `redirect_uri`: The redirect URI given by the client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + login_token: String, + 
redirect_uri: Url, + ) -> Result; + + /// Fulfill a compat SSO login by providing a browser session + /// + /// Returns the fulfilled compat SSO login + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate the timestamps + /// * `compat_sso_login`: The compat SSO login to fulfill + /// * `browser_session`: The browser session to associate with the compat + /// SSO login + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn fulfill( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + browser_session: &BrowserSession, + ) -> Result; + + /// Mark a compat SSO login as exchanged + /// + /// Returns the exchanged compat SSO login + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate the timestamps + /// * `compat_sso_login`: The compat SSO login to mark as exchanged + /// * `compat_session`: The compat session created as a result of the + /// exchange + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn exchange( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + compat_session: &CompatSession, + ) -> Result; + + /// List [`CompatSsoLogin`] with the given filter and pagination + /// + /// Returns a page of compat SSO logins + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: CompatSsoLoginFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`CompatSsoLogin`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: CompatSsoLoginFilter<'_>) -> Result; +} + +repository_impl!(CompatSsoLoginRepository: 
+ async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_for_session( + &mut self, + session: &CompatSession, + ) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + login_token: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + login_token: String, + redirect_uri: Url, + ) -> Result; + + async fn fulfill( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + browser_session: &BrowserSession, + ) -> Result; + + async fn exchange( + &mut self, + clock: &dyn Clock, + compat_sso_login: CompatSsoLogin, + compat_session: &CompatSession, + ) -> Result; + + async fn list( + &mut self, + filter: CompatSsoLoginFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: CompatSsoLoginFilter<'_>) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/lib.rs b/matrix-authentication-service/crates/storage/src/lib.rs new file mode 100644 index 00000000..c5d5d0b2 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/lib.rs @@ -0,0 +1,127 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Interactions with the storage backend +//! +//! This crate provides a set of traits that can be implemented to interact with +//! the storage backend. Those traits are called repositories and are grouped by +//! the type of data they manage. +//! +//! Each of those reposotories can be accessed via the [`RepositoryAccess`] +//! trait. This trait can be wrapped in a [`BoxRepository`] to allow using it +//! without caring about the underlying storage backend, and without carrying +//! around the generic type parameter. +//! +//! # Defining a new repository +//! +//! 
To define a new repository, you have to: +//! 1. Define a new (async) repository trait, with the methods you need +//! 2. Write an implementation of this trait for each storage backend you want +//! (currently only for `mas-storage-pg`) +//! 3. Make it accessible via the [`RepositoryAccess`] trait +//! +//! The repository trait definition should look like this: +//! +//! ```ignore +//! #[async_trait] +//! pub trait FakeDataRepository: Send + Sync { +//! /// The error type returned by the repository +//! type Error; +//! +//! /// Lookup a [`FakeData`] by its ID +//! /// +//! /// Returns `None` if no [`FakeData`] was found +//! /// +//! /// # Parameters +//! /// +//! /// * `id`: The ID of the [`FakeData`] to lookup +//! /// +//! /// # Errors +//! /// +//! /// Returns [`Self::Error`] if the underlying repository fails +//! async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; +//! +//! /// Create a new [`FakeData`] +//! /// +//! /// Returns the newly-created [`FakeData`]. +//! /// +//! /// # Parameters +//! /// +//! /// * `rng`: The random number generator to use +//! /// * `clock`: The clock used to generate timestamps +//! /// +//! /// # Errors +//! /// +//! /// Returns [`Self::Error`] if the underlying repository fails +//! async fn add( +//! &mut self, +//! rng: &mut (dyn RngCore + Send), +//! clock: &dyn Clock, +//! ) -> Result; +//! } +//! +//! repository_impl!(FakeDataRepository: +//! async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; +//! async fn add( +//! &mut self, +//! rng: &mut (dyn RngCore + Send), +//! clock: &dyn Clock, +//! ) -> Result; +//! ); +//! ``` +//! +//! Four things to note with the implementation: +//! +//! 1. It defined an assocated error type, and all functions are faillible, +//! and use that error type +//! 2. Lookups return an `Result, Self::Error>`, because 'not found' +//! errors are usually cases that are handled differently +//! 3. Operations that need to record the current type use a +//! 
[`mas_data_model::Clock`] parameter. Operations that need to generate +//! new IDs also use a random number generator. +//! 4. All the methods use an `&mut self`. This is ensures only one operation +//! is done at a time on a single repository instance. +//! +//! Then update the [`RepositoryAccess`] trait to make the new repository +//! available: +//! +//! ```ignore +//! /// Access the various repositories the backend implements. +//! pub trait RepositoryAccess: Send { +//! /// The backend-specific error type used by each repository. +//! type Error: std::error::Error + Send + Sync + 'static; +//! +//! // ...other repositories... +//! +//! /// Get a [`FakeDataRepository`] +//! fn fake_data<'c>(&'c mut self) -> Box + 'c>; +//! } +//! ``` + +#![deny(clippy::future_not_send, missing_docs)] +#![allow(clippy::module_name_repetitions)] + +pub mod pagination; +pub(crate) mod repository; +mod utils; + +pub mod app_session; +pub mod compat; +pub mod oauth2; +pub mod personal; +pub mod policy_data; +pub mod queue; +pub mod upstream_oauth2; +pub mod user; + +pub use self::{ + pagination::{Page, Pagination}, + repository::{ + BoxRepository, BoxRepositoryFactory, Repository, RepositoryAccess, RepositoryError, + RepositoryFactory, RepositoryTransaction, + }, + utils::MapErr, +}; diff --git a/matrix-authentication-service/crates/storage/src/oauth2/access_token.rs b/matrix-authentication-service/crates/storage/src/oauth2/access_token.rs new file mode 100644 index 00000000..0001a4ee --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/access_token.rs @@ -0,0 +1,198 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::{AccessToken, Clock, Session}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// An [`OAuth2AccessTokenRepository`] helps interacting with [`AccessToken`] +/// saved in the storage backend +#[async_trait] +pub trait OAuth2AccessTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an access token by its ID + /// + /// Returns the access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find an access token by its token + /// + /// Returns the access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `access_token`: The token of the access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + /// Add a new access token to the database + /// + /// Returns the newly created access token + /// + /// # Parameters + /// + /// * `rng`: A random number generator + /// * `clock`: The clock used to generate timestamps + /// * `session`: The session the access token is associated with + /// * `access_token`: The access token to add + /// * `expires_after`: The duration after which the access token expires. 
If + /// [`None`] the access token never expires + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: String, + expires_after: Option, + ) -> Result; + + /// Revoke an access token + /// + /// Returns the revoked access token + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `access_token`: The access token to revoke + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn revoke( + &mut self, + clock: &dyn Clock, + access_token: AccessToken, + ) -> Result; + + /// Mark the access token as used, to track when it was first used + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `access_token`: The access token to mark as used + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn mark_used( + &mut self, + clock: &dyn Clock, + access_token: AccessToken, + ) -> Result; + + /// Cleanup revoked access tokens + /// + /// Returns the number of access tokens that were cleaned up, as well as the + /// timestamp of the last access token revoked + /// + /// # Parameters + /// + /// * `since`: An optional datetime since which to clean up revoked access + /// tokens. 
This is useful to call this method multiple times in a row + /// * `until`: The datetime until which to clean up revoked access tokens + /// * `limit`: The maximum number of access tokens to clean up + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_revoked( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + /// Cleanup expired access tokens + /// + /// Returns the number of access tokens that were cleaned up, as well as the + /// timestamp of the last access token expiration + /// + /// # Parameters + /// + /// * `since`: An optional datetime since which to clean up expired access + /// tokens. This is useful to call this method multiple times in a row + /// * `until`: The datetime until which to clean up expired access tokens + /// * `limit`: The maximum number of access tokens to clean up + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_expired( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +} + +repository_impl!(OAuth2AccessTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: String, + expires_after: Option, + ) -> Result; + + async fn revoke( + &mut self, + clock: &dyn Clock, + access_token: AccessToken, + ) -> Result; + + async fn mark_used( + &mut self, + clock: &dyn Clock, + access_token: AccessToken, + ) -> Result; + + async fn cleanup_revoked( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + async fn cleanup_expired( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> 
Result<(usize, Option>), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/oauth2/authorization_grant.rs b/matrix-authentication-service/crates/storage/src/oauth2/authorization_grant.rs new file mode 100644 index 00000000..c0f1030e --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/authorization_grant.rs @@ -0,0 +1,198 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{AuthorizationCode, AuthorizationGrant, Client, Clock, Session}; +use oauth2_types::{requests::ResponseMode, scope::Scope}; +use rand_core::RngCore; +use ulid::Ulid; +use url::Url; + +use crate::repository_impl; + +/// An [`OAuth2AuthorizationGrantRepository`] helps interacting with +/// [`AuthorizationGrant`] saved in the storage backend +#[async_trait] +pub trait OAuth2AuthorizationGrantRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Create a new authorization grant + /// + /// Returns the newly created authorization grant + /// + /// # Parameters + /// + /// * `rng`: A random number generator + /// * `clock`: The clock used to generate timestamps + /// * `client`: The client that requested the authorization grant + /// * `redirect_uri`: The redirect URI the client requested + /// * `scope`: The scope the client requested + /// * `code`: The authorization code used by this grant, if the `code` + /// `response_type` was requested + /// * `state`: The state the client sent, if set + /// * `nonce`: The nonce the client sent, if set + /// * `response_mode`: The response mode the client requested + /// * `response_type_id_token`: Whether the `id_token` `response_type` was + /// requested + /// * 
`login_hint`: The `login_hint` the client sent, if set + /// * `locale`: The locale the detected when the user asked for the + /// authorization grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + #[allow(clippy::too_many_arguments)] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + redirect_uri: Url, + scope: Scope, + code: Option, + state: Option, + nonce: Option, + response_mode: ResponseMode, + response_type_id_token: bool, + login_hint: Option, + locale: Option, + ) -> Result; + + /// Lookup an authorization grant by its ID + /// + /// Returns the authorization grant if found, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the authorization grant to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find an authorization grant by its code + /// + /// Returns the authorization grant if found, `None` otherwise + /// + /// # Parameters + /// + /// * `code`: The code of the authorization grant to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_code(&mut self, code: &str) + -> Result, Self::Error>; + + /// Fulfill an authorization grant, by giving the [`Session`] that it + /// created + /// + /// Returns the updated authorization grant + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `session`: The session that was created using this authorization grant + /// * `authorization_grant`: The authorization grant to fulfill + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn fulfill( + &mut self, + clock: &dyn Clock, + session: &Session, + authorization_grant: AuthorizationGrant, + ) -> Result; + + /// Mark an authorization grant as exchanged + /// + /// Returns 
the updated authorization grant + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `authorization_grant`: The authorization grant to mark as exchanged + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn exchange( + &mut self, + clock: &dyn Clock, + authorization_grant: AuthorizationGrant, + ) -> Result; + + /// Cleanup old authorization grants + /// + /// This will delete authorization grants with IDs up to and including + /// `until`. Uses ULID cursor-based pagination for efficiency. + /// + /// Returns the number of grants deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of grants to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(OAuth2AuthorizationGrantRepository: + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + redirect_uri: Url, + scope: Scope, + code: Option, + state: Option, + nonce: Option, + response_mode: ResponseMode, + response_type_id_token: bool, + login_hint: Option, + locale: Option, + ) -> Result; + + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_code(&mut self, code: &str) + -> Result, Self::Error>; + + async fn fulfill( + &mut self, + clock: &dyn Clock, + session: &Session, + authorization_grant: AuthorizationGrant, + ) -> Result; + + async fn exchange( + &mut self, + clock: &dyn Clock, + authorization_grant: AuthorizationGrant, + ) -> Result; + + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) 
-> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/oauth2/client.rs b/matrix-authentication-service/crates/storage/src/oauth2/client.rs new file mode 100644 index 00000000..7e376e1c --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/client.rs @@ -0,0 +1,254 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::{BTreeMap, BTreeSet}; + +use async_trait::async_trait; +use mas_data_model::{Client, Clock}; +use mas_iana::{jose::JsonWebSignatureAlg, oauth::OAuthClientAuthenticationMethod}; +use mas_jose::jwk::PublicJsonWebKeySet; +use oauth2_types::{oidc::ApplicationType, requests::GrantType}; +use rand_core::RngCore; +use ulid::Ulid; +use url::Url; + +use crate::repository_impl; + +/// An [`OAuth2ClientRepository`] helps interacting with [`Client`] saved in the +/// storage backend +#[async_trait] +pub trait OAuth2ClientRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an OAuth client by its ID + /// + /// Returns `None` if the client does not exist + /// + /// # Parameters + /// + /// * `id`: The ID of the client to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find an OAuth client by its client ID + async fn find_by_client_id(&mut self, client_id: &str) -> Result, Self::Error> { + let Ok(id) = client_id.parse() else { + return Ok(None); + }; + self.lookup(id).await + } + + /// Find an OAuth client by its metadata digest + /// + /// Returns `None` if the client does not exist + /// + /// # Parameters + /// + /// * `digest`: The metadata digest (SHA-256 hash encoded in hex) of the + /// client to 
find + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_metadata_digest( + &mut self, + digest: &str, + ) -> Result, Self::Error>; + + /// Load a batch of OAuth clients by their IDs + /// + /// Returns a map of client IDs to clients. If a client does not exist, it + /// is not present in the map. + /// + /// # Parameters + /// + /// * `ids`: The IDs of the clients to load + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn load_batch( + &mut self, + ids: BTreeSet, + ) -> Result, Self::Error>; + + /// Add a new OAuth client + /// + /// Returns the client that was added + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `redirect_uris`: The list of redirect URIs used by this client + /// * `metadata_digest`: The hash of the client metadata, if computed + /// * `encrypted_client_secret`: The encrypted client secret, if any + /// * `application_type`: The application type of this client + /// * `grant_types`: The list of grant types this client can use + /// * `client_name`: The human-readable name of this client, if given + /// * `logo_uri`: The URI of the logo of this client, if given + /// * `client_uri`: The URI of a website of this client, if given + /// * `policy_uri`: The URI of the privacy policy of this client, if given + /// * `tos_uri`: The URI of the terms of service of this client, if given + /// * `jwks_uri`: The URI of the JWKS of this client, if given + /// * `jwks`: The JWKS of this client, if given + /// * `id_token_signed_response_alg`: The algorithm used to sign the ID + /// token + /// * `userinfo_signed_response_alg`: The algorithm used to sign the user + /// info. 
If none, the user info endpoint will not sign the response + /// * `token_endpoint_auth_method`: The authentication method used by this + /// client when calling the token endpoint + /// * `token_endpoint_auth_signing_alg`: The algorithm used to sign the JWT + /// when using the `client_secret_jwt` or `private_key_jwt` authentication + /// methods + /// * `initiate_login_uri`: The URI used to initiate a login, if given + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + #[allow(clippy::too_many_arguments)] + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + redirect_uris: Vec, + metadata_digest: Option, + encrypted_client_secret: Option, + application_type: Option, + grant_types: Vec, + client_name: Option, + logo_uri: Option, + client_uri: Option, + policy_uri: Option, + tos_uri: Option, + jwks_uri: Option, + jwks: Option, + id_token_signed_response_alg: Option, + userinfo_signed_response_alg: Option, + token_endpoint_auth_method: Option, + token_endpoint_auth_signing_alg: Option, + initiate_login_uri: Option, + ) -> Result; + + /// Add or replace a static client + /// + /// Returns the client that was added or replaced + /// + /// # Parameters + /// + /// * `client_id`: The client ID + /// * `client_auth_method`: The authentication method this client uses + /// * `encrypted_client_secret`: The encrypted client secret, if any + /// * `jwks`: The client JWKS, if any + /// * `jwks_uri`: The client JWKS URI, if any + /// * `redirect_uris`: The list of redirect URIs used by this client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + #[allow(clippy::too_many_arguments)] + async fn upsert_static( + &mut self, + client_id: Ulid, + client_name: Option, + client_auth_method: OAuthClientAuthenticationMethod, + encrypted_client_secret: Option, + jwks: Option, + jwks_uri: Option, + redirect_uris: Vec, + ) -> Result; + + /// List all static clients + /// + 
/// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn all_static(&mut self) -> Result, Self::Error>; + + /// Delete a client + /// + /// # Parameters + /// + /// * `client`: The client to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails, or if the + /// client does not exist + async fn delete(&mut self, client: Client) -> Result<(), Self::Error> { + self.delete_by_id(client.id).await + } + + /// Delete a client by ID + /// + /// # Parameters + /// + /// * `id`: The ID of the client to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails, or if the + /// client does not exist + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error>; +} + +repository_impl!(OAuth2ClientRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_metadata_digest( + &mut self, + digest: &str, + ) -> Result, Self::Error>; + + async fn load_batch( + &mut self, + ids: BTreeSet, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + redirect_uris: Vec, + metadata_digest: Option, + encrypted_client_secret: Option, + application_type: Option, + grant_types: Vec, + client_name: Option, + logo_uri: Option, + client_uri: Option, + policy_uri: Option, + tos_uri: Option, + jwks_uri: Option, + jwks: Option, + id_token_signed_response_alg: Option, + userinfo_signed_response_alg: Option, + token_endpoint_auth_method: Option, + token_endpoint_auth_signing_alg: Option, + initiate_login_uri: Option, + ) -> Result; + + async fn upsert_static( + &mut self, + client_id: Ulid, + client_name: Option, + client_auth_method: OAuthClientAuthenticationMethod, + encrypted_client_secret: Option, + jwks: Option, + jwks_uri: Option, + redirect_uris: Vec, + ) -> Result; + + async fn all_static(&mut self) -> Result, Self::Error>; + + async fn delete(&mut self, client: 
Client) -> Result<(), Self::Error>; + + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/oauth2/device_code_grant.rs b/matrix-authentication-service/crates/storage/src/oauth2/device_code_grant.rs new file mode 100644 index 00000000..7b8915cb --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/device_code_grant.rs @@ -0,0 +1,260 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::Duration; +use mas_data_model::{BrowserSession, Client, Clock, DeviceCodeGrant, Session}; +use oauth2_types::scope::Scope; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// Parameters used to create a new [`DeviceCodeGrant`] +pub struct OAuth2DeviceCodeGrantParams<'a> { + /// The client which requested the device code grant + pub client: &'a Client, + + /// The scope requested by the client + pub scope: Scope, + + /// The device code which the client uses to poll for authorisation + pub device_code: String, + + /// The user code which the client uses to display to the user + pub user_code: String, + + /// After how long the device code expires + pub expires_in: Duration, + + /// IP address from which the request was made + pub ip_address: Option, + + /// The user agent from which the request was made + pub user_agent: Option, +} + +/// An [`OAuth2DeviceCodeGrantRepository`] helps interacting with +/// [`DeviceCodeGrant`] saved in the storage backend. 
+#[async_trait] +pub trait OAuth2DeviceCodeGrantRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Create a new device code grant + /// + /// Returns the newly created device code grant + /// + /// # Parameters + /// + /// * `rng`: A random number generator + /// * `clock`: The clock used to generate timestamps + /// * `params`: The parameters used to create the device code grant. See the + /// fields of [`OAuth2DeviceCodeGrantParams`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: OAuth2DeviceCodeGrantParams<'_>, + ) -> Result; + + /// Lookup a device code grant by its ID + /// + /// Returns the device code grant if found, [`None`] otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the device code grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Lookup a device code grant by its device code + /// + /// Returns the device code grant if found, [`None`] otherwise + /// + /// # Parameters + /// + /// * `device_code`: The device code of the device code grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_device_code( + &mut self, + device_code: &str, + ) -> Result, Self::Error>; + + /// Lookup a device code grant by its user code + /// + /// Returns the device code grant if found, [`None`] otherwise + /// + /// # Parameters + /// + /// * `user_code`: The user code of the device code grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_user_code( + &mut self, + user_code: &str, + ) -> Result, Self::Error>; + + /// Mark the device code grant as fulfilled with the given browser session + /// + /// Returns the updated device code 
grant + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `device_code_grant`: The device code grant to fulfill + /// * `browser_session`: The browser session which was used to fulfill the + /// device code grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// device code grant is not in the [`Pending`] state + /// + /// [`Pending`]: mas_data_model::DeviceCodeGrantState::Pending + async fn fulfill( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result; + + /// Mark the device code grant as rejected with the given browser session + /// + /// Returns the updated device code grant + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `device_code_grant`: The device code grant to reject + /// * `browser_session`: The browser session which was used to reject the + /// device code grant + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// device code grant is not in the [`Pending`] state + /// + /// [`Pending`]: mas_data_model::DeviceCodeGrantState::Pending + async fn reject( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result; + + /// Mark the device code grant as exchanged and store the session which was + /// created + /// + /// Returns the updated device code grant + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `device_code_grant`: The device code grant to exchange + /// * `session`: The OAuth 2.0 session which was created + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// device code grant is not in the [`Fulfilled`] state + /// + /// [`Fulfilled`]: mas_data_model::DeviceCodeGrantState::Fulfilled + async fn exchange( + &mut self, + 
clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + session: &Session, + ) -> Result; + + /// Cleanup old device code grants + /// + /// This will delete device code grants that were created before `until`. + /// Uses ULID cursor-based pagination for efficiency. + /// + /// Returns the number of grants deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The ULID threshold representing 7 days ago + /// * `limit`: The maximum number of grants to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(OAuth2DeviceCodeGrantRepository: + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: OAuth2DeviceCodeGrantParams<'_>, + ) -> Result; + + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_device_code( + &mut self, + device_code: &str, + ) -> Result, Self::Error>; + + async fn find_by_user_code( + &mut self, + user_code: &str, + ) -> Result, Self::Error>; + + async fn fulfill( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result; + + async fn reject( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + browser_session: &BrowserSession, + ) -> Result; + + async fn exchange( + &mut self, + clock: &dyn Clock, + device_code_grant: DeviceCodeGrant, + session: &Session, + ) -> Result; + + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/oauth2/mod.rs b/matrix-authentication-service/crates/storage/src/oauth2/mod.rs new file mode 100644 
index 00000000..a2d17256 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/mod.rs @@ -0,0 +1,23 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repositories to interact with entities related to the OAuth 2.0 protocol + +mod access_token; +mod authorization_grant; +mod client; +mod device_code_grant; +mod refresh_token; +mod session; + +pub use self::{ + access_token::OAuth2AccessTokenRepository, + authorization_grant::OAuth2AuthorizationGrantRepository, + client::OAuth2ClientRepository, + device_code_grant::{OAuth2DeviceCodeGrantParams, OAuth2DeviceCodeGrantRepository}, + refresh_token::OAuth2RefreshTokenRepository, + session::{OAuth2SessionFilter, OAuth2SessionRepository}, +}; diff --git a/matrix-authentication-service/crates/storage/src/oauth2/refresh_token.rs b/matrix-authentication-service/crates/storage/src/oauth2/refresh_token.rs new file mode 100644 index 00000000..c0ef40ec --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/refresh_token.rs @@ -0,0 +1,206 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use mas_data_model::{AccessToken, Clock, RefreshToken, Session}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// An [`OAuth2RefreshTokenRepository`] helps interacting with [`RefreshToken`] +/// saved in the storage backend +#[async_trait] +pub trait OAuth2RefreshTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a refresh token by its ID + /// + /// Returns `None` if no [`RefreshToken`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`RefreshToken`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find a refresh token by its token + /// + /// Returns `None` if no [`RefreshToken`] was found + /// + /// # Parameters + /// + /// * `token`: The token of the [`RefreshToken`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error>; + + /// Add a new refresh token to the database + /// + /// Returns the newly created [`RefreshToken`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `session`: The [`Session`] in which to create the [`RefreshToken`] + /// * `access_token`: The [`AccessToken`] created alongside this + /// [`RefreshToken`] + /// * `refresh_token`: The refresh token to store + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: &AccessToken, + refresh_token: String, + ) -> Result; + + /// Consume a refresh token + /// + /// Returns the updated [`RefreshToken`] + /// + /// # Parameters + /// + /// * `clock`: The 
clock used to generate timestamps + /// * `refresh_token`: The [`RefreshToken`] to consume + /// * `replaced_by`: The [`RefreshToken`] which replaced this one + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails, or if the + /// token was already consumed or revoked + async fn consume( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + replaced_by: &RefreshToken, + ) -> Result; + + /// Revoke a refresh token + /// + /// Returns the updated [`RefreshToken`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `refresh_token`: The [`RefreshToken`] to revoke + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails, or if the + /// token was already revoked or consumed + async fn revoke( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + ) -> Result; + + /// Cleanup revoked refresh tokens that were revoked before a certain time + /// + /// Returns the number of deleted tokens and the last `revoked_at` timestamp + /// processed + /// + /// # Parameters + /// + /// * `since`: An optional timestamp to start from + /// * `until`: The timestamp before which to delete tokens + /// * `limit`: The maximum number of tokens to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_revoked( + &mut self, + since: Option>, + until: chrono::DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + /// Cleanup consumed refresh tokens that were consumed before a certain time + /// + /// A token is considered as fully consumed only if both the `consumed_at` + /// column is set and the next refresh token in the chain also has its + /// `consumed_at` set. 
+ /// + /// Returns the number of deleted tokens and the last `consumed_at` + /// timestamp processed + /// + /// # Parameters + /// + /// * `since`: An optional timestamp to start from + /// * `until`: The timestamp before which to delete tokens + /// * `limit`: The maximum number of tokens to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_consumed( + &mut self, + since: Option>, + until: chrono::DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +} + +repository_impl!(OAuth2RefreshTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + refresh_token: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &Session, + access_token: &AccessToken, + refresh_token: String, + ) -> Result; + + async fn consume( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + replaced_by: &RefreshToken, + ) -> Result; + + async fn revoke( + &mut self, + clock: &dyn Clock, + refresh_token: RefreshToken, + ) -> Result; + + async fn cleanup_revoked( + &mut self, + since: Option>, + until: chrono::DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + async fn cleanup_consumed( + &mut self, + since: Option>, + until: chrono::DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/oauth2/session.rs b/matrix-authentication-service/crates/storage/src/oauth2/session.rs new file mode 100644 index 00000000..75035653 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/oauth2/session.rs @@ -0,0 +1,591 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{BrowserSession, Client, Clock, Device, Session, User}; +use oauth2_types::scope::Scope; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Pagination, pagination::Page, repository_impl, user::BrowserSessionFilter}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum OAuth2SessionState { + Active, + Finished, +} + +impl OAuth2SessionState { + pub fn is_active(self) -> bool { + matches!(self, Self::Active) + } + + pub fn is_finished(self) -> bool { + matches!(self, Self::Finished) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum ClientKind { + Static, + Dynamic, +} + +impl ClientKind { + pub fn is_static(self) -> bool { + matches!(self, Self::Static) + } +} + +/// Filter parameters for listing OAuth 2.0 sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct OAuth2SessionFilter<'a> { + user: Option<&'a User>, + any_user: Option, + browser_session: Option<&'a BrowserSession>, + browser_session_filter: Option>, + device: Option<&'a Device>, + client: Option<&'a Client>, + client_kind: Option, + state: Option, + scope: Option<&'a Scope>, + last_active_before: Option>, + last_active_after: Option>, +} + +impl<'a> OAuth2SessionFilter<'a> { + /// Create a new [`OAuth2SessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// List sessions for a specific user + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + /// + /// Returns [`None`] if no user filter was set + #[must_use] + pub fn user(&self) -> Option<&'a User> { + self.user + } + + /// List sessions which belong to any user + #[must_use] + pub fn for_any_user(mut self) -> Self { + 
self.any_user = Some(true); + self + } + + /// List sessions which belong to no user + #[must_use] + pub fn for_no_user(mut self) -> Self { + self.any_user = Some(false); + self + } + + /// Get the 'any user' filter + /// + /// Returns [`None`] if no 'any user' filter was set + #[must_use] + pub fn any_user(&self) -> Option { + self.any_user + } + + /// List sessions started by a specific browser session + #[must_use] + pub fn for_browser_session(mut self, browser_session: &'a BrowserSession) -> Self { + self.browser_session = Some(browser_session); + self + } + + /// List sessions started by a set of browser sessions + #[must_use] + pub fn for_browser_sessions( + mut self, + browser_session_filter: BrowserSessionFilter<'a>, + ) -> Self { + self.browser_session_filter = Some(browser_session_filter); + self + } + + /// Get the browser session filter + /// + /// Returns [`None`] if no browser session filter was set + #[must_use] + pub fn browser_session(&self) -> Option<&'a BrowserSession> { + self.browser_session + } + + /// Get the browser sessions filter + /// + /// Returns [`None`] if no browser session filter was set + #[must_use] + pub fn browser_session_filter(&self) -> Option> { + self.browser_session_filter + } + + /// List sessions for a specific client + #[must_use] + pub fn for_client(mut self, client: &'a Client) -> Self { + self.client = Some(client); + self + } + + /// Get the client filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn client(&self) -> Option<&'a Client> { + self.client + } + + /// List only static clients + #[must_use] + pub fn only_static_clients(mut self) -> Self { + self.client_kind = Some(ClientKind::Static); + self + } + + /// List only dynamic clients + #[must_use] + pub fn only_dynamic_clients(mut self) -> Self { + self.client_kind = Some(ClientKind::Dynamic); + self + } + + /// Get the client kind filter + /// + /// Returns [`None`] if no client kind filter was set + #[must_use] + pub fn 
client_kind(&self) -> Option { + self.client_kind + } + + /// Only return sessions with a last active time before the given time + #[must_use] + pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { + self.last_active_before = Some(last_active_before); + self + } + + /// Only return sessions with a last active time after the given time + #[must_use] + pub fn with_last_active_after(mut self, last_active_after: DateTime) -> Self { + self.last_active_after = Some(last_active_after); + self + } + + /// Get the last active before filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn last_active_before(&self) -> Option> { + self.last_active_before + } + + /// Get the last active after filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn last_active_after(&self) -> Option> { + self.last_active_after + } + + /// Only return active sessions + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(OAuth2SessionState::Active); + self + } + + /// Only return finished sessions + #[must_use] + pub fn finished_only(mut self) -> Self { + self.state = Some(OAuth2SessionState::Finished); + self + } + + /// Get the state filter + /// + /// Returns [`None`] if no state filter was set + #[must_use] + pub fn state(&self) -> Option { + self.state + } + + /// Only return sessions with the given scope + #[must_use] + pub fn with_scope(mut self, scope: &'a Scope) -> Self { + self.scope = Some(scope); + self + } + + /// Get the scope filter + /// + /// Returns [`None`] if no scope filter was set + #[must_use] + pub fn scope(&self) -> Option<&'a Scope> { + self.scope + } + + /// Only return sessions that have the given device in their scope + #[must_use] + pub fn for_device(mut self, device: &'a Device) -> Self { + self.device = Some(device); + self + } + + /// Get the device filter + /// + /// Returns [`None`] if no device filter was set + #[must_use] + pub fn device(&self) -> 
Option<&'a Device> { + self.device + } +} + +/// An [`OAuth2SessionRepository`] helps interacting with [`Session`] +/// saved in the storage backend +#[async_trait] +pub trait OAuth2SessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an [`Session`] by its ID + /// + /// Returns `None` if no [`Session`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`Session`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Create a new [`Session`] with the given parameters + /// + /// Returns the newly created [`Session`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `client`: The [`Client`] which created the [`Session`] + /// * `user`: The [`User`] for which the session should be created, if any + /// * `user_session`: The [`BrowserSession`] of the user which completed the + /// authorization, if any + /// * `scope`: The [`Scope`] of the [`Session`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + user: Option<&User>, + user_session: Option<&BrowserSession>, + scope: Scope, + ) -> Result; + + /// Create a new [`Session`] out of a [`Client`] and a [`BrowserSession`] + /// + /// Returns the newly created [`Session`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `client`: The [`Client`] which created the [`Session`] + /// * `user_session`: The [`BrowserSession`] of the user which completed the + /// authorization + /// * `scope`: The [`Scope`] of the [`Session`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying 
repository fails + async fn add_from_browser_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + user_session: &BrowserSession, + scope: Scope, + ) -> Result { + self.add( + rng, + clock, + client, + Some(&user_session.user), + Some(user_session), + scope, + ) + .await + } + + /// Create a new [`Session`] for a [`Client`] using the client credentials + /// flow + /// + /// Returns the newly created [`Session`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `client`: The [`Client`] which created the [`Session`] + /// * `scope`: The [`Scope`] of the [`Session`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add_from_client_credentials( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + scope: Scope, + ) -> Result { + self.add(rng, clock, client, None, None, scope).await + } + + /// Mark a [`Session`] as finished + /// + /// Returns the updated [`Session`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `session`: The [`Session`] to mark as finished + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish(&mut self, clock: &dyn Clock, session: Session) + -> Result; + + /// Mark all the [`Session`] matching the given filter as finished + /// + /// Returns the number of sessions affected + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: OAuth2SessionFilter<'_>, + ) -> Result; + + /// List [`Session`]s matching the given filter and pagination parameters + /// + /// # Parameters + /// + /// * `filter`: The 
filter parameters + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: OAuth2SessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count [`Session`]s matching the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: OAuth2SessionFilter<'_>) -> Result; + + /// Record a batch of [`Session`] activity + /// + /// # Parameters + /// + /// * `activity`: A list of tuples containing the session ID, the last + /// activity timestamp and the IP address of the client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + /// Record the user agent of a [`Session`] + /// + /// # Parameters + /// + /// * `session`: The [`Session`] to record the user agent for + /// * `user_agent`: The user agent to record + async fn record_user_agent( + &mut self, + session: Session, + user_agent: String, + ) -> Result; + + /// Set the human name of a [`Session`] + /// + /// # Parameters + /// + /// * `session`: The [`Session`] to set the human name for + /// * `human_name`: The human name to set + async fn set_human_name( + &mut self, + session: Session, + human_name: Option, + ) -> Result; + + /// Cleanup finished [`Session`]s + /// + /// Deletes sessions finished between `since` and `until`. Returns the + /// number of deleted sessions and the timestamp of the last deleted + /// session for pagination. + /// + /// # Parameters + /// + /// * `since`: The earliest finish time to delete (exclusive). If `None`, + /// starts from the beginning. 
+ /// * `until`: The latest finish time to delete (exclusive) + /// * `limit`: Maximum number of sessions to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + /// Clear IP addresses from sessions inactive since the threshold + /// + /// Sets `last_active_ip` to `NULL` for sessions where `last_active_at` is + /// before the threshold. Returns the number of sessions affected and the + /// last `last_active_at` timestamp processed for pagination. + /// + /// # Parameters + /// + /// * `since`: Only process sessions with `last_active_at` at or after this + /// timestamp (exclusive). If `None`, starts from the beginning. + /// * `threshold`: Clear IPs for sessions with `last_active_at` before this + /// time + /// * `limit`: Maximum number of sessions to update in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +} + +repository_impl!(OAuth2SessionRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + user: Option<&User>, + user_session: Option<&BrowserSession>, + scope: Scope, + ) -> Result; + + async fn add_from_browser_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + user_session: &BrowserSession, + scope: Scope, + ) -> Result; + + async fn add_from_client_credentials( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + client: &Client, + scope: Scope, + ) -> Result; + + async fn finish(&mut self, clock: &dyn Clock, session: Session) + -> Result; + + async fn finish_bulk( 
+ &mut self, + clock: &dyn Clock, + filter: OAuth2SessionFilter<'_>, + ) -> Result; + + async fn list( + &mut self, + filter: OAuth2SessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: OAuth2SessionFilter<'_>) -> Result; + + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + async fn record_user_agent( + &mut self, + session: Session, + user_agent: String, + ) -> Result; + + async fn set_human_name( + &mut self, + session: Session, + human_name: Option, + ) -> Result; + + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/pagination.rs b/matrix-authentication-service/crates/storage/src/pagination.rs new file mode 100644 index 00000000..ad632cb1 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/pagination.rs @@ -0,0 +1,237 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Utilities to manage paginated queries. 
+ +use thiserror::Error; +use ulid::Ulid; + +/// An error returned when invalid pagination parameters are provided +#[derive(Debug, Error)] +#[error("Either 'first' or 'last' must be specified")] +pub struct InvalidPagination; + +/// Pagination parameters +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Pagination { + /// The cursor to start from + pub before: Option, + + /// The cursor to end at + pub after: Option, + + /// The maximum number of items to return + pub count: usize, + + /// In which direction to paginate + pub direction: PaginationDirection, +} + +/// The direction to paginate +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum PaginationDirection { + /// Paginate forward + Forward, + + /// Paginate backward + Backward, +} + +/// A node in a page, with a cursor +pub trait Node { + /// The cursor of that particular node + fn cursor(&self) -> C; +} + +impl Pagination { + /// Creates a new [`Pagination`] from user-provided parameters. + /// + /// # Errors + /// + /// Either `first` or `last` must be provided, else this function will + /// return an [`InvalidPagination`] error. 
+ pub fn try_new( + before: Option, + after: Option, + first: Option, + last: Option, + ) -> Result { + let (direction, count) = match (first, last) { + (Some(first), _) => (PaginationDirection::Forward, first), + (_, Some(last)) => (PaginationDirection::Backward, last), + (None, None) => return Err(InvalidPagination), + }; + + Ok(Self { + before, + after, + count, + direction, + }) + } + + /// Creates a [`Pagination`] which gets the first N items + #[must_use] + pub const fn first(first: usize) -> Self { + Self { + before: None, + after: None, + count: first, + direction: PaginationDirection::Forward, + } + } + + /// Creates a [`Pagination`] which gets the last N items + #[must_use] + pub const fn last(last: usize) -> Self { + Self { + before: None, + after: None, + count: last, + direction: PaginationDirection::Backward, + } + } + + /// Get items before the given cursor + #[must_use] + pub fn before(mut self, cursor: C) -> Self { + self.before = Some(cursor); + self + } + + /// Clear the before cursor + #[must_use] + pub fn clear_before(mut self) -> Self { + self.before = None; + self + } + + /// Get items after the given cursor + #[must_use] + pub fn after(mut self, cursor: C) -> Self { + self.after = Some(cursor); + self + } + + /// Clear the after cursor + #[must_use] + pub fn clear_after(mut self) -> Self { + self.after = None; + self + } + + /// Process a page returned by a paginated query + #[must_use] + pub fn process>(&self, mut nodes: Vec) -> Page { + let is_full = nodes.len() == (self.count + 1); + if is_full { + nodes.pop(); + } + + let (has_previous_page, has_next_page) = match self.direction { + PaginationDirection::Forward => (false, is_full), + PaginationDirection::Backward => { + // 6. 
If the last argument is provided, I reverse the order of the results + nodes.reverse(); + (is_full, false) + } + }; + + let edges = nodes + .into_iter() + .map(|node| Edge { + cursor: node.cursor(), + node, + }) + .collect(); + + Page { + has_next_page, + has_previous_page, + edges, + } + } +} + +/// An edge in a paginated result +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Edge { + /// The cursor of the edge + pub cursor: C, + /// The node of the edge + pub node: T, +} + +/// A page of results returned by a paginated query +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Page { + /// When paginating forwards, this is true if there are more items after + pub has_next_page: bool, + + /// When paginating backwards, this is true if there are more items before + pub has_previous_page: bool, + + /// The items in the page + pub edges: Vec>, +} + +impl Page { + /// Map the items in this page with the given function + /// + /// # Parameters + /// + /// * `f`: The function to map the items with + #[must_use] + pub fn map(self, mut f: F) -> Page + where + F: FnMut(T) -> T2, + { + let edges = self + .edges + .into_iter() + .map(|edge| Edge { + cursor: edge.cursor, + node: f(edge.node), + }) + .collect(); + Page { + has_next_page: self.has_next_page, + has_previous_page: self.has_previous_page, + edges, + } + } + + /// Try to map the items in this page with the given fallible function + /// + /// # Parameters + /// + /// * `f`: The fallible function to map the items with + /// + /// # Errors + /// + /// Returns the first error encountered while mapping the items + pub fn try_map(self, mut f: F) -> Result, E> + where + F: FnMut(T) -> Result, + { + let edges: Result>, E> = self + .edges + .into_iter() + .map(|edge| { + Ok(Edge { + cursor: edge.cursor, + node: f(edge.node)?, + }) + }) + .collect(); + + Ok(Page { + has_next_page: self.has_next_page, + has_previous_page: self.has_previous_page, + edges: edges?, + }) + } +} diff --git 
a/matrix-authentication-service/crates/storage/src/personal/access_token.rs b/matrix-authentication-service/crates/storage/src/personal/access_token.rs new file mode 100644 index 00000000..363a3199 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/personal/access_token.rs @@ -0,0 +1,140 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::Duration; +use mas_data_model::{ + Clock, + personal::{PersonalAccessToken, session::PersonalSession}, +}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// An [`PersonalAccessTokenRepository`] helps interacting with +/// [`PersonalAccessToken`] saved in the storage backend +#[async_trait] +pub trait PersonalAccessTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an access token by its ID + /// + /// Returns the access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find an access token by its token + /// + /// Returns the access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `access_token`: The token of the access token to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + /// Find the active access token belonging to a given session. 
+ /// + /// Returns the active access token if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `session`: The session to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_active_for_session( + &mut self, + session: &PersonalSession, + ) -> Result, Self::Error>; + + /// Add a new access token to the database + /// + /// Returns the newly created access token + /// + /// # Parameters + /// + /// * `rng`: A random number generator + /// * `clock`: The clock used to generate timestamps + /// * `session`: The session the access token is associated with + /// * `access_token`: The access token to add + /// * `expires_after`: The duration after which the access token expires. If + /// [`None`] the access token never expires + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &PersonalSession, + access_token: &str, + expires_after: Option, + ) -> Result; + + /// Revoke an access token + /// + /// Returns the revoked access token + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `access_token`: The access token to revoke + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn revoke( + &mut self, + clock: &dyn Clock, + access_token: PersonalAccessToken, + ) -> Result; +} + +repository_impl!(PersonalAccessTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_token( + &mut self, + access_token: &str, + ) -> Result, Self::Error>; + + async fn find_active_for_session( + &mut self, + session: &PersonalSession, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + session: &PersonalSession, + access_token: &str, + expires_after: Option, + ) -> Result; + + async fn 
revoke( + &mut self, + clock: &dyn Clock, + access_token: PersonalAccessToken, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/personal/mod.rs b/matrix-authentication-service/crates/storage/src/personal/mod.rs new file mode 100644 index 00000000..3a9dfcd6 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/personal/mod.rs @@ -0,0 +1,16 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repositories to deal with Personal Sessions and Personal Access Tokens +//! (PATs), which are sessions/access tokens created manually by users for use +//! in scripts, bots and similar applications. + +mod access_token; +mod session; + +pub use self::{ + access_token::PersonalAccessTokenRepository, + session::{PersonalSessionFilter, PersonalSessionRepository, PersonalSessionState}, +}; diff --git a/matrix-authentication-service/crates/storage/src/personal/session.rs b/matrix-authentication-service/crates/storage/src/personal/session.rs new file mode 100644 index 00000000..921c6df3 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/personal/session.rs @@ -0,0 +1,398 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Client, Clock, Device, User, + personal::{ + PersonalAccessToken, + session::{PersonalSession, PersonalSessionOwner}, + }, +}; +use oauth2_types::scope::Scope; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Page, Pagination, repository_impl}; + +/// A [`PersonalSessionRepository`] helps interacting with +/// [`PersonalSession`] saved in the storage backend +#[async_trait] +pub trait PersonalSessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a Personal session by its ID + /// + /// Returns the Personal session if it exists, `None` otherwise + /// + /// # Parameters + /// + /// * `id`: The ID of the Personal session to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Start a new Personal session + /// + /// Returns the newly created Personal session + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `owner_user`: The user that will own the personal session + /// * `actor_user`: The user that will be represented by the personal + /// session + /// * `device`: The device ID of this session + /// * `human_name`: The human-readable name of the session provided by the + /// client or the user + /// * `scope`: The [`Scope`] of the [`PersonalSession`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + owner: PersonalSessionOwner, + actor_user: &User, + human_name: String, + scope: Scope, + ) -> Result; + + /// End a Personal session + /// + /// Returns the ended Personal session + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate 
timestamps + /// * `Personal_session`: The Personal session to end + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn revoke( + &mut self, + clock: &dyn Clock, + personal_session: PersonalSession, + ) -> Result; + + /// Revoke all the [`PersonalSession`]s matching the given filter. + /// + /// Returns the number of sessions affected + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn revoke_bulk( + &mut self, + clock: &dyn Clock, + filter: PersonalSessionFilter<'_>, + ) -> Result; + + /// List [`PersonalSession`]s matching the given filter and pagination + /// parameters + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: PersonalSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, Self::Error>; + + /// Count [`PersonalSession`]s matching the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: PersonalSessionFilter<'_>) -> Result; + + /// Record a batch of [`PersonalSession`] activity + /// + /// # Parameters + /// + /// * `activity`: A list of tuples containing the session ID, the last + /// activity timestamp and the IP address of the client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; +} + +repository_impl!(PersonalSessionRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn 
add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + owner: PersonalSessionOwner, + actor_user: &User, + human_name: String, + scope: Scope, + ) -> Result; + + async fn revoke( + &mut self, + clock: &dyn Clock, + personal_session: PersonalSession, + ) -> Result; + + async fn revoke_bulk( + &mut self, + clock: &dyn Clock, + filter: PersonalSessionFilter<'_>, + ) -> Result; + + async fn list( + &mut self, + filter: PersonalSessionFilter<'_>, + pagination: Pagination, + ) -> Result)>, Self::Error>; + + async fn count(&mut self, filter: PersonalSessionFilter<'_>) -> Result; + + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; +); + +/// Filter parameters for listing personal sessions alongside personal access +/// tokens +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct PersonalSessionFilter<'a> { + owner_user: Option<&'a User>, + owner_oauth2_client: Option<&'a Client>, + actor_user: Option<&'a User>, + device: Option<&'a Device>, + state: Option, + scope: Option<&'a Scope>, + last_active_before: Option>, + last_active_after: Option>, + expires_before: Option>, + expires_after: Option>, + expires: Option, +} + +/// Filter for what state a personal session is in. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum PersonalSessionState { + /// The personal session is active, which means it either + /// has active access tokens or can have new access tokens generated. + Active, + /// The personal session is revoked, which means no more access tokens + /// can be generated and none are active. 
+ Revoked, +} + +impl<'a> PersonalSessionFilter<'a> { + /// Create a new [`PersonalSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// List sessions owned by a specific user + #[must_use] + pub fn for_owner_user(mut self, user: &'a User) -> Self { + self.owner_user = Some(user); + self + } + + /// Get the owner user filter + /// + /// Returns [`None`] if no user filter was set + #[must_use] + pub fn owner_oauth2_client(&self) -> Option<&'a Client> { + self.owner_oauth2_client + } + + /// List sessions owned by a specific user + #[must_use] + pub fn for_owner_oauth2_client(mut self, client: &'a Client) -> Self { + self.owner_oauth2_client = Some(client); + self + } + + /// Get the owner user filter + /// + /// Returns [`None`] if no user filter was set + #[must_use] + pub fn owner_user(&self) -> Option<&'a User> { + self.owner_user + } + + /// List sessions acting as a specific user + #[must_use] + pub fn for_actor_user(mut self, user: &'a User) -> Self { + self.actor_user = Some(user); + self + } + + /// Get the actor user filter + /// + /// Returns [`None`] if no user filter was set + #[must_use] + pub fn actor_user(&self) -> Option<&'a User> { + self.actor_user + } + + /// Only return sessions with a last active time before the given time + #[must_use] + pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { + self.last_active_before = Some(last_active_before); + self + } + + /// Only return sessions with a last active time after the given time + #[must_use] + pub fn with_last_active_after(mut self, last_active_after: DateTime) -> Self { + self.last_active_after = Some(last_active_after); + self + } + + /// Get the last active before filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn last_active_before(&self) -> Option> { + self.last_active_before + } + + /// Get the last active after filter + /// + /// Returns [`None`] if no client filter was set + 
#[must_use] + pub fn last_active_after(&self) -> Option> { + self.last_active_after + } + + /// Only return active sessions + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(PersonalSessionState::Active); + self + } + + /// Only return finished sessions + #[must_use] + pub fn finished_only(mut self) -> Self { + self.state = Some(PersonalSessionState::Revoked); + self + } + + /// Get the state filter + /// + /// Returns [`None`] if no state filter was set + #[must_use] + pub fn state(&self) -> Option { + self.state + } + + /// Only return sessions with the given scope + #[must_use] + pub fn with_scope(mut self, scope: &'a Scope) -> Self { + self.scope = Some(scope); + self + } + + /// Get the scope filter + /// + /// Returns [`None`] if no scope filter was set + #[must_use] + pub fn scope(&self) -> Option<&'a Scope> { + self.scope + } + + /// Only return sessions that have the given device in their scope + #[must_use] + pub fn for_device(mut self, device: &'a Device) -> Self { + self.device = Some(device); + self + } + + /// Get the device filter + /// + /// Returns [`None`] if no device filter was set + #[must_use] + pub fn device(&self) -> Option<&'a Device> { + self.device + } + + /// Only return sessions whose access tokens expire before the given time + #[must_use] + pub fn with_expires_before(mut self, expires_before: DateTime) -> Self { + self.expires_before = Some(expires_before); + self + } + + /// Get the expires before filter + /// + /// Returns [`None`] if no expires before filter was set + #[must_use] + pub fn expires_before(&self) -> Option> { + self.expires_before + } + + /// Only return sessions whose access tokens expire after the given time + #[must_use] + pub fn with_expires_after(mut self, expires_after: DateTime) -> Self { + self.expires_after = Some(expires_after); + self + } + + /// Get the expires after filter + /// + /// Returns [`None`] if no expires after filter was set + #[must_use] + pub fn expires_after(&self) 
-> Option> { + self.expires_after + } + + /// Only return sessions whose access tokens have, or don't have, + /// an expiry time set + #[must_use] + pub fn with_expires(mut self, expires: bool) -> Self { + self.expires = Some(expires); + self + } + + /// Get the expires filter + /// + /// Returns [`None`] if no expires filter was set + #[must_use] + pub fn expires(&self) -> Option { + self.expires + } +} diff --git a/matrix-authentication-service/crates/storage/src/policy_data.rs b/matrix-authentication-service/crates/storage/src/policy_data.rs new file mode 100644 index 00000000..68f0040a --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/policy_data.rs @@ -0,0 +1,76 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repositories to interact with the policy data saved in the storage backend. + +use async_trait::async_trait; +use mas_data_model::{Clock, PolicyData}; +use rand_core::RngCore; + +use crate::repository_impl; + +/// A [`PolicyDataRepository`] helps interacting with the policy data saved in +/// the storage backend. +#[async_trait] +pub trait PolicyDataRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Get the latest policy data + /// + /// Returns the latest policy data, or `None` if no policy data is + /// available. + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn get(&mut self) -> Result, Self::Error>; + + /// Set the latest policy data + /// + /// Returns the newly created policy data. 
+ /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate the timestamps + /// * `data`: The policy data to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn set( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + data: serde_json::Value, + ) -> Result; + + /// Prune old policy data + /// + /// Returns the number of entries pruned. + /// + /// # Parameters + /// + /// * `keep`: the number of old entries to keep + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn prune(&mut self, keep: usize) -> Result; +} + +repository_impl!(PolicyDataRepository: + async fn get(&mut self) -> Result, Self::Error>; + + async fn set( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + data: serde_json::Value, + ) -> Result; + + async fn prune(&mut self, keep: usize) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/queue/job.rs b/matrix-authentication-service/crates/storage/src/queue/job.rs new file mode 100644 index 00000000..a436e066 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/queue/job.rs @@ -0,0 +1,409 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Repository to interact with jobs in the job queue + +use async_trait::async_trait; +use chrono::{DateTime, Duration, Utc}; +use mas_data_model::Clock; +use opentelemetry::trace::TraceContextExt; +use rand_core::RngCore; +use serde::{Deserialize, Serialize}; +use tracing_opentelemetry::OpenTelemetrySpanExt; +use ulid::Ulid; + +use super::Worker; +use crate::repository_impl; + +/// Represents a job in the job queue +pub struct Job { + /// The ID of the job + pub id: Ulid, + + /// The queue on which the job was placed + pub queue_name: String, + + /// The payload of the job + pub payload: serde_json::Value, + + /// Arbitrary metadata about the job + pub metadata: JobMetadata, + + /// Which attempt it is + pub attempt: usize, +} + +/// Metadata stored alongside the job +#[derive(Serialize, Deserialize, Default, Clone, Debug)] +pub struct JobMetadata { + #[serde(default)] + trace_id: String, + + #[serde(default)] + span_id: String, + + #[serde(default)] + trace_flags: u8, +} + +impl JobMetadata { + fn new(span_context: &opentelemetry::trace::SpanContext) -> Self { + Self { + trace_id: span_context.trace_id().to_string(), + span_id: span_context.span_id().to_string(), + trace_flags: span_context.trace_flags().to_u8(), + } + } + + /// Get the [`opentelemetry::trace::SpanContext`] from this [`JobMetadata`] + #[must_use] + pub fn span_context(&self) -> opentelemetry::trace::SpanContext { + use opentelemetry::trace::{SpanContext, SpanId, TraceFlags, TraceId, TraceState}; + SpanContext::new( + TraceId::from_hex(&self.trace_id).unwrap_or(TraceId::INVALID), + SpanId::from_hex(&self.span_id).unwrap_or(SpanId::INVALID), + TraceFlags::new(self.trace_flags), + // Trace context is remote, as it comes from another service/from the database + true, + TraceState::NONE, + ) + } +} + +/// A trait that represents a job which can be inserted into a queue +pub trait InsertableJob: Serialize + Send { + /// The name of the queue this job belongs to + const QUEUE_NAME: &'static str; +} + +/// 
A [`QueueJobRepository`] is used to schedule jobs to be executed by a +/// worker. +#[async_trait] +pub trait QueueJobRepository: Send + Sync { + /// The error type returned by the repository. + type Error; + + /// Schedule a job to be executed as soon as possible by a worker. + /// + /// # Parameters + /// + /// * `rng` - The random number generator used to generate a new job ID + /// * `clock` - The clock used to generate timestamps + /// * `queue_name` - The name of the queue to schedule the job on + /// * `payload` - The payload of the job + /// * `metadata` - Arbitrary metadata about the job scheduled immediately. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn schedule( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + ) -> Result<(), Self::Error>; + + /// Schedule a job to be executed at a later date by a worker. + /// + /// # Parameters + /// + /// * `rng` - The random number generator used to generate a new job ID + /// * `clock` - The clock used to generate timestamps + /// * `queue_name` - The name of the queue to schedule the job on + /// * `payload` - The payload of the job + /// * `metadata` - Arbitrary metadata about the job scheduled immediately. + /// * `scheduled_at` - The date and time to schedule the job for + /// * `schedule_name` - The name of the recurring schedule which scheduled + /// this job + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. 
+ #[allow(clippy::too_many_arguments)] + async fn schedule_later( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + scheduled_at: DateTime, + schedule_name: Option<&str>, + ) -> Result<(), Self::Error>; + + /// Reserve multiple jobs from multiple queues + /// + /// # Parameters + /// + /// * `clock` - The clock used to generate timestamps + /// * `worker` - The worker that is reserving the jobs + /// * `queues` - The queues to reserve jobs from + /// * `count` - The number of jobs to reserve + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn reserve( + &mut self, + clock: &dyn Clock, + worker: &Worker, + queues: &[&str], + count: usize, + ) -> Result, Self::Error>; + + /// Mark a job as completed + /// + /// # Parameters + /// + /// * `clock` - The clock used to generate timestamps + /// * `id` - The ID of the job to mark as completed + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn mark_as_completed(&mut self, clock: &dyn Clock, id: Ulid) -> Result<(), Self::Error>; + + /// Marks a job as failed. + /// + /// # Parameters + /// + /// * `clock` - The clock used to generate timestamps + /// * `id` - The ID of the job to mark as failed + /// * `reason` - The reason for the failure + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn mark_as_failed( + &mut self, + clock: &dyn Clock, + id: Ulid, + reason: &str, + ) -> Result<(), Self::Error>; + + /// Retry a job. + /// + /// # Parameters + /// + /// * `rng` - The random number generator used to generate a new job ID + /// * `clock` - The clock used to generate timestamps + /// * `id` - The ID of the job to reschedule + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. 
+ async fn retry( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + id: Ulid, + delay: Duration, + ) -> Result<(), Self::Error>; + + /// Mark all scheduled jobs past their scheduled date as available to be + /// executed. + /// + /// Returns the number of jobs that were marked as available. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn schedule_available_jobs(&mut self, clock: &dyn Clock) -> Result; + + /// Cleanup old completed and failed jobs + /// + /// This will delete jobs with status 'completed' or 'failed' and IDs up to + /// and including `until`. Uses ULID cursor-based pagination for efficiency. + /// + /// Returns the number of jobs deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of jobs to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(QueueJobRepository: + async fn schedule( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + ) -> Result<(), Self::Error>; + + async fn schedule_later( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + queue_name: &str, + payload: serde_json::Value, + metadata: serde_json::Value, + scheduled_at: DateTime, + schedule_name: Option<&str>, + ) -> Result<(), Self::Error>; + + async fn reserve( + &mut self, + clock: &dyn Clock, + worker: &Worker, + queues: &[&str], + count: usize, + ) -> Result, Self::Error>; + + async fn mark_as_completed(&mut self, clock: &dyn Clock, id: Ulid) -> Result<(), Self::Error>; + 
+ async fn mark_as_failed(&mut self, + clock: &dyn Clock, + id: Ulid, + reason: &str, + ) -> Result<(), Self::Error>; + + async fn retry( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + id: Ulid, + delay: Duration, + ) -> Result<(), Self::Error>; + + async fn schedule_available_jobs(&mut self, clock: &dyn Clock) -> Result; + + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); + +/// Extension trait for [`QueueJobRepository`] to help adding a job to the queue +/// through the [`InsertableJob`] trait. This isn't in the +/// [`QueueJobRepository`] trait to keep it object safe. +#[async_trait] +pub trait QueueJobRepositoryExt: QueueJobRepository { + /// Schedule a job to be executed as soon as possible by a worker. + /// + /// # Parameters + /// + /// * `rng` - The random number generator used to generate a new job ID + /// * `clock` - The clock used to generate timestamps + /// * `job` - The job to schedule + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn schedule_job( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + job: J, + ) -> Result<(), Self::Error>; + + /// Schedule a job to be executed at a later date by a worker. + /// + /// # Parameters + /// + /// * `rng` - The random number generator used to generate a new job ID + /// * `clock` - The clock used to generate timestamps + /// * `job` - The job to schedule + /// * `scheduled_at` - The date and time to schedule the job for + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. 
+ async fn schedule_job_later( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + job: J, + scheduled_at: DateTime, + ) -> Result<(), Self::Error>; +} + +#[async_trait] +impl QueueJobRepositoryExt for T +where + T: QueueJobRepository, +{ + #[tracing::instrument( + name = "db.queue_job.schedule_job", + fields( + queue_job.queue_name = J::QUEUE_NAME, + ), + skip_all, + )] + async fn schedule_job( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + job: J, + ) -> Result<(), Self::Error> { + // Grab the span context from the current span + let span = tracing::Span::current(); + let ctx = span.context(); + let span = ctx.span(); + let span_context = span.span_context(); + + let metadata = JobMetadata::new(span_context); + let metadata = serde_json::to_value(metadata).expect("Could not serialize metadata"); + + let payload = serde_json::to_value(job).expect("Could not serialize job"); + self.schedule(rng, clock, J::QUEUE_NAME, payload, metadata) + .await + } + + #[tracing::instrument( + name = "db.queue_job.schedule_job_later", + fields( + queue_job.queue_name = J::QUEUE_NAME, + ), + skip_all, + )] + async fn schedule_job_later( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + job: J, + scheduled_at: DateTime, + ) -> Result<(), Self::Error> { + // Grab the span context from the current span + let span = tracing::Span::current(); + let ctx = span.context(); + let span = ctx.span(); + let span_context = span.span_context(); + + let metadata = JobMetadata::new(span_context); + let metadata = serde_json::to_value(metadata).expect("Could not serialize metadata"); + + let payload = serde_json::to_value(job).expect("Could not serialize job"); + self.schedule_later( + rng, + clock, + J::QUEUE_NAME, + payload, + metadata, + scheduled_at, + None, + ) + .await + } +} diff --git a/matrix-authentication-service/crates/storage/src/queue/mod.rs b/matrix-authentication-service/crates/storage/src/queue/mod.rs new file mode 100644 
index 00000000..958ae13d --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/queue/mod.rs @@ -0,0 +1,18 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! A module containing repositories for the job queue + +mod job; +mod schedule; +mod tasks; +mod worker; + +pub use self::{ + job::{InsertableJob, Job, JobMetadata, QueueJobRepository, QueueJobRepositoryExt}, + schedule::{QueueScheduleRepository, ScheduleStatus}, + tasks::*, + worker::{QueueWorkerRepository, Worker}, +}; diff --git a/matrix-authentication-service/crates/storage/src/queue/schedule.rs b/matrix-authentication-service/crates/storage/src/queue/schedule.rs new file mode 100644 index 00000000..32c225ec --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/queue/schedule.rs @@ -0,0 +1,57 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repository to interact with recurrent scheduled jobs in the job queue + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; + +use crate::repository_impl; + +/// [`QueueScheduleRepository::list`] returns a list of [`ScheduleStatus`], +/// which has the name of the schedule and infos about its last run +pub struct ScheduleStatus { + /// Name of the schedule, uniquely identifying it + pub schedule_name: String, + /// When the schedule was last run + pub last_scheduled_at: Option>, + /// Did the last job on this schedule finish? (successfully or not) + pub last_scheduled_job_completed: Option, +} + +/// A [`QueueScheduleRepository`] is used to interact with recurrent scheduled +/// jobs in the job queue. +#[async_trait] +pub trait QueueScheduleRepository: Send + Sync { + /// The error type returned by the repository. 
+ type Error; + + /// Setup the list of schedules in the repository + /// + /// # Parameters + /// + /// * `schedules` - The list of schedules to setup + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn setup(&mut self, schedules: &[&'static str]) -> Result<(), Self::Error>; + + /// List the schedules in the repository, with the last time they were run + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn list(&mut self) -> Result, Self::Error>; +} + +repository_impl!(QueueScheduleRepository: + async fn setup( + &mut self, + schedules: &[&'static str], + ) -> Result<(), Self::Error>; + + async fn list(&mut self) -> Result, Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/queue/tasks.rs b/matrix-authentication-service/crates/storage/src/queue/tasks.rs new file mode 100644 index 00000000..51845a43 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/queue/tasks.rs @@ -0,0 +1,653 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use chrono::{DateTime, Utc}; +use mas_data_model::{ + BrowserSession, CompatSession, Device, Session, User, UserEmailAuthentication, + UserRecoverySession, +}; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use super::InsertableJob; +use crate::{Page, Pagination}; + +/// This is the previous iteration of the email verification job. It has been +/// replaced by [`SendEmailAuthenticationCodeJob`]. This struct is kept to be +/// able to consume jobs that are still in the queue. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct VerifyEmailJob { + user_email_id: Ulid, + language: Option, +} + +impl VerifyEmailJob { + /// The ID of the email address to verify. 
+ #[must_use] + pub fn user_email_id(&self) -> Ulid { + self.user_email_id + } +} + +impl InsertableJob for VerifyEmailJob { + const QUEUE_NAME: &'static str = "verify-email"; +} + +/// A job to send an email authentication code to a user. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct SendEmailAuthenticationCodeJob { + user_email_authentication_id: Ulid, + language: String, +} + +impl SendEmailAuthenticationCodeJob { + /// Create a new job to send an email authentication code to a user. + #[must_use] + pub fn new(user_email_authentication: &UserEmailAuthentication, language: String) -> Self { + Self { + user_email_authentication_id: user_email_authentication.id, + language, + } + } + + /// The language to use for the email. + #[must_use] + pub fn language(&self) -> &str { + &self.language + } + + /// The ID of the email authentication to send the code for. + #[must_use] + pub fn user_email_authentication_id(&self) -> Ulid { + self.user_email_authentication_id + } +} + +impl InsertableJob for SendEmailAuthenticationCodeJob { + const QUEUE_NAME: &'static str = "send-email-authentication-code"; +} + +/// A job to provision the user on the homeserver. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ProvisionUserJob { + user_id: Ulid, + set_display_name: Option, +} + +impl ProvisionUserJob { + /// Create a new job to provision the user on the homeserver. + #[must_use] + pub fn new(user: &User) -> Self { + Self { + user_id: user.id, + set_display_name: None, + } + } + + #[doc(hidden)] + #[must_use] + pub fn new_for_id(user_id: Ulid) -> Self { + Self { + user_id, + set_display_name: None, + } + } + + /// Set the display name of the user. + #[must_use] + pub fn set_display_name(mut self, display_name: String) -> Self { + self.set_display_name = Some(display_name); + self + } + + /// Get the display name to be set. 
+ #[must_use] + pub fn display_name_to_set(&self) -> Option<&str> { + self.set_display_name.as_deref() + } + + /// The ID of the user to provision. + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } +} + +impl InsertableJob for ProvisionUserJob { + const QUEUE_NAME: &'static str = "provision-user"; +} + +/// A job to provision a device for a user on the homeserver. +/// +/// This job is deprecated, use the `SyncDevicesJob` instead. It is kept to +/// not break existing jobs in the database. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ProvisionDeviceJob { + user_id: Ulid, + device_id: String, +} + +impl ProvisionDeviceJob { + /// The ID of the user to provision the device for. + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } + + /// The ID of the device to provision. + #[must_use] + pub fn device_id(&self) -> &str { + &self.device_id + } +} + +impl InsertableJob for ProvisionDeviceJob { + const QUEUE_NAME: &'static str = "provision-device"; +} + +/// A job to delete a device for a user on the homeserver. +/// +/// This job is deprecated, use the `SyncDevicesJob` instead. It is kept to +/// not break existing jobs in the database. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct DeleteDeviceJob { + user_id: Ulid, + device_id: String, +} + +impl DeleteDeviceJob { + /// Create a new job to delete a device for a user on the homeserver. + #[must_use] + pub fn new(user: &User, device: &Device) -> Self { + Self { + user_id: user.id, + device_id: device.as_str().to_owned(), + } + } + + /// The ID of the user to delete the device for. + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } + + /// The ID of the device to delete. 
+ #[must_use] + pub fn device_id(&self) -> &str { + &self.device_id + } +} + +impl InsertableJob for DeleteDeviceJob { + const QUEUE_NAME: &'static str = "delete-device"; +} + +/// A job which syncs the list of devices of a user with the homeserver +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct SyncDevicesJob { + user_id: Ulid, +} + +impl SyncDevicesJob { + /// Create a new job to sync the list of devices of a user with the + /// homeserver + #[must_use] + pub fn new(user: &User) -> Self { + Self { user_id: user.id } + } + + /// Create a new job to sync the list of devices of a user with the + /// homeserver for the given user ID + /// + /// This is useful to use in cases where the [`User`] object isn't loaded + #[must_use] + pub fn new_for_id(user_id: Ulid) -> Self { + Self { user_id } + } + + /// The ID of the user to sync the devices for + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } +} + +impl InsertableJob for SyncDevicesJob { + const QUEUE_NAME: &'static str = "sync-devices"; +} + +/// A job to deactivate and lock a user +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct DeactivateUserJob { + user_id: Ulid, + hs_erase: bool, +} + +impl DeactivateUserJob { + /// Create a new job to deactivate and lock a user + /// + /// # Parameters + /// + /// * `user` - The user to deactivate + /// * `hs_erase` - Whether to erase the user from the homeserver + #[must_use] + pub fn new(user: &User, hs_erase: bool) -> Self { + Self { + user_id: user.id, + hs_erase, + } + } + + /// The ID of the user to deactivate + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } + + /// Whether to erase the user from the homeserver + #[must_use] + pub fn hs_erase(&self) -> bool { + self.hs_erase + } +} + +impl InsertableJob for DeactivateUserJob { + const QUEUE_NAME: &'static str = "deactivate-user"; +} + +/// A job to reactivate a user +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ReactivateUserJob { + user_id: Ulid, 
+} + +impl ReactivateUserJob { + /// Create a new job to reactivate a user + /// + /// # Parameters + /// + /// * `user` - The user to reactivate + #[must_use] + pub fn new(user: &User) -> Self { + Self { user_id: user.id } + } + + /// The ID of the user to reactivate + #[must_use] + pub fn user_id(&self) -> Ulid { + self.user_id + } +} + +impl InsertableJob for ReactivateUserJob { + const QUEUE_NAME: &'static str = "reactivate-user"; +} + +/// Send account recovery emails +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct SendAccountRecoveryEmailsJob { + user_recovery_session_id: Ulid, +} + +impl SendAccountRecoveryEmailsJob { + /// Create a new job to send account recovery emails + /// + /// # Parameters + /// + /// * `user_recovery_session` - The user recovery session to send the email + /// for + /// * `language` - The locale to send the email in + #[must_use] + pub fn new(user_recovery_session: &UserRecoverySession) -> Self { + Self { + user_recovery_session_id: user_recovery_session.id, + } + } + + /// The ID of the user recovery session to send the email for + #[must_use] + pub fn user_recovery_session_id(&self) -> Ulid { + self.user_recovery_session_id + } +} + +impl InsertableJob for SendAccountRecoveryEmailsJob { + const QUEUE_NAME: &'static str = "send-account-recovery-email"; +} + +/// Cleanup revoked OAuth 2.0 access tokens +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupRevokedOAuthAccessTokensJob; + +impl InsertableJob for CleanupRevokedOAuthAccessTokensJob { + const QUEUE_NAME: &'static str = "cleanup-revoked-oauth-access-tokens"; +} + +/// Cleanup expired OAuth 2.0 access tokens +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupExpiredOAuthAccessTokensJob; + +impl InsertableJob for CleanupExpiredOAuthAccessTokensJob { + const QUEUE_NAME: &'static str = "cleanup-expired-oauth-access-tokens"; +} + +/// Cleanup revoked OAuth 2.0 refresh tokens +#[derive(Serialize, Deserialize, Debug, 
Clone, Default)] +pub struct CleanupRevokedOAuthRefreshTokensJob; + +impl InsertableJob for CleanupRevokedOAuthRefreshTokensJob { + const QUEUE_NAME: &'static str = "cleanup-revoked-oauth-refresh-tokens"; +} + +/// Cleanup consumed OAuth 2.0 refresh tokens +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupConsumedOAuthRefreshTokensJob; + +impl InsertableJob for CleanupConsumedOAuthRefreshTokensJob { + const QUEUE_NAME: &'static str = "cleanup-consumed-oauth-refresh-tokens"; +} + +/// Cleanup old user registrations +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupUserRegistrationsJob; + +impl InsertableJob for CleanupUserRegistrationsJob { + const QUEUE_NAME: &'static str = "cleanup-user-registrations"; +} + +/// Cleanup finished compat sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupFinishedCompatSessionsJob; + +impl InsertableJob for CleanupFinishedCompatSessionsJob { + const QUEUE_NAME: &'static str = "cleanup-finished-compat-sessions"; +} + +/// Cleanup finished OAuth 2.0 sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupFinishedOAuth2SessionsJob; + +impl InsertableJob for CleanupFinishedOAuth2SessionsJob { + const QUEUE_NAME: &'static str = "cleanup-finished-oauth2-sessions"; +} + +/// Cleanup finished user/browser sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupFinishedUserSessionsJob; + +impl InsertableJob for CleanupFinishedUserSessionsJob { + const QUEUE_NAME: &'static str = "cleanup-finished-user-sessions"; +} + +/// Cleanup old OAuth 2.0 authorization grants +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupOAuthAuthorizationGrantsJob; + +impl InsertableJob for CleanupOAuthAuthorizationGrantsJob { + const QUEUE_NAME: &'static str = "cleanup-oauth-authorization-grants"; +} + +/// Cleanup old OAuth 2.0 device code grants +#[derive(Serialize, Deserialize, Debug, 
Clone, Default)] +pub struct CleanupOAuthDeviceCodeGrantsJob; + +impl InsertableJob for CleanupOAuthDeviceCodeGrantsJob { + const QUEUE_NAME: &'static str = "cleanup-oauth-device-code-grants"; +} + +/// Cleanup old user recovery sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupUserRecoverySessionsJob; + +impl InsertableJob for CleanupUserRecoverySessionsJob { + const QUEUE_NAME: &'static str = "cleanup-user-recovery-sessions"; +} + +/// Cleanup old user email authentications +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupUserEmailAuthenticationsJob; + +impl InsertableJob for CleanupUserEmailAuthenticationsJob { + const QUEUE_NAME: &'static str = "cleanup-user-email-authentications"; +} + +/// Cleanup old pending upstream OAuth authorization sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupUpstreamOAuthSessionsJob; + +impl InsertableJob for CleanupUpstreamOAuthSessionsJob { + const QUEUE_NAME: &'static str = "cleanup-upstream-oauth-sessions"; +} + +/// Cleanup orphaned upstream OAuth links +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupUpstreamOAuthLinksJob; + +impl InsertableJob for CleanupUpstreamOAuthLinksJob { + const QUEUE_NAME: &'static str = "cleanup-upstream-oauth-links"; +} + +/// Cleanup old completed and failed queue jobs +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupQueueJobsJob; + +impl InsertableJob for CleanupQueueJobsJob { + const QUEUE_NAME: &'static str = "cleanup-queue-jobs"; +} + +/// Scheduled job to expire inactive sessions +/// +/// This job will trigger jobs to expire inactive compat, oauth and user +/// sessions. 
+#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ExpireInactiveSessionsJob; + +impl InsertableJob for ExpireInactiveSessionsJob { + const QUEUE_NAME: &'static str = "expire-inactive-sessions"; +} + +/// Expire inactive OAuth 2.0 sessions +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ExpireInactiveOAuthSessionsJob { + threshold: DateTime, + after: Option, +} + +impl ExpireInactiveOAuthSessionsJob { + /// Create a new job to expire inactive OAuth 2.0 sessions + /// + /// # Parameters + /// + /// * `threshold` - The threshold to expire sessions at + #[must_use] + pub fn new(threshold: DateTime) -> Self { + Self { + threshold, + after: None, + } + } + + /// Get the threshold to expire sessions at + #[must_use] + pub fn threshold(&self) -> DateTime { + self.threshold + } + + /// Get the pagination cursor + #[must_use] + pub fn pagination(&self, batch_size: usize) -> Pagination { + let pagination = Pagination::first(batch_size); + if let Some(after) = self.after { + pagination.after(after) + } else { + pagination + } + } + + /// Get the next job given the page returned by the database + #[must_use] + pub fn next(&self, page: &Page) -> Option { + if !page.has_next_page { + return None; + } + + let last_edge = page.edges.last()?; + Some(Self { + threshold: self.threshold, + after: Some(last_edge.cursor), + }) + } +} + +impl InsertableJob for ExpireInactiveOAuthSessionsJob { + const QUEUE_NAME: &'static str = "expire-inactive-oauth-sessions"; +} + +/// Expire inactive compatibility sessions +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ExpireInactiveCompatSessionsJob { + threshold: DateTime, + after: Option, +} + +impl ExpireInactiveCompatSessionsJob { + /// Create a new job to expire inactive compatibility sessions + /// + /// # Parameters + /// + /// * `threshold` - The threshold to expire sessions at + #[must_use] + pub fn new(threshold: DateTime) -> Self { + Self { + threshold, + after: None, + } + } + + /// Get the threshold to 
expire sessions at + #[must_use] + pub fn threshold(&self) -> DateTime { + self.threshold + } + + /// Get the pagination cursor + #[must_use] + pub fn pagination(&self, batch_size: usize) -> Pagination { + let pagination = Pagination::first(batch_size); + if let Some(after) = self.after { + pagination.after(after) + } else { + pagination + } + } + + /// Get the next job given the page returned by the database + #[must_use] + pub fn next(&self, page: &Page) -> Option { + if !page.has_next_page { + return None; + } + + let last_edge = page.edges.last()?; + Some(Self { + threshold: self.threshold, + after: Some(last_edge.cursor), + }) + } +} + +impl InsertableJob for ExpireInactiveCompatSessionsJob { + const QUEUE_NAME: &'static str = "expire-inactive-compat-sessions"; +} + +/// Expire inactive user sessions +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpireInactiveUserSessionsJob { + threshold: DateTime, + after: Option, +} + +impl ExpireInactiveUserSessionsJob { + /// Create a new job to expire inactive user/browser sessions + /// + /// # Parameters + /// + /// * `threshold` - The threshold to expire sessions at + #[must_use] + pub fn new(threshold: DateTime) -> Self { + Self { + threshold, + after: None, + } + } + + /// Get the threshold to expire sessions at + #[must_use] + pub fn threshold(&self) -> DateTime { + self.threshold + } + + /// Get the pagination cursor + #[must_use] + pub fn pagination(&self, batch_size: usize) -> Pagination { + let pagination = Pagination::first(batch_size); + if let Some(after) = self.after { + pagination.after(after) + } else { + pagination + } + } + + /// Get the next job given the page returned by the database + #[must_use] + pub fn next(&self, page: &Page) -> Option { + if !page.has_next_page { + return None; + } + + let last_edge = page.edges.last()?; + Some(Self { + threshold: self.threshold, + after: Some(last_edge.cursor), + }) + } +} + +impl InsertableJob for ExpireInactiveUserSessionsJob { + const QUEUE_NAME: 
&'static str = "expire-inactive-user-sessions"; +} + +/// Prune stale policy data +#[derive(Debug, Serialize, Deserialize)] +pub struct PruneStalePolicyDataJob; + +impl InsertableJob for PruneStalePolicyDataJob { + const QUEUE_NAME: &'static str = "prune-stale-policy-data"; +} + +/// Cleanup IP addresses from inactive OAuth 2.0 sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupInactiveOAuth2SessionIpsJob; + +impl InsertableJob for CleanupInactiveOAuth2SessionIpsJob { + const QUEUE_NAME: &'static str = "cleanup-inactive-oauth2-session-ips"; +} + +/// Cleanup IP addresses from inactive compat sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupInactiveCompatSessionIpsJob; + +impl InsertableJob for CleanupInactiveCompatSessionIpsJob { + const QUEUE_NAME: &'static str = "cleanup-inactive-compat-session-ips"; +} + +/// Cleanup IP addresses from inactive user/browser sessions +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupInactiveUserSessionIpsJob; + +impl InsertableJob for CleanupInactiveUserSessionIpsJob { + const QUEUE_NAME: &'static str = "cleanup-inactive-user-session-ips"; +} diff --git a/matrix-authentication-service/crates/storage/src/queue/worker.rs b/matrix-authentication-service/crates/storage/src/queue/worker.rs new file mode 100644 index 00000000..c937134f --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/queue/worker.rs @@ -0,0 +1,128 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repository to interact with workers in the job queue + +use async_trait::async_trait; +use chrono::Duration; +use mas_data_model::Clock; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// A worker is an entity which can execute jobs. +pub struct Worker { + /// The ID of the worker. 
+ pub id: Ulid, +} + +/// A [`QueueWorkerRepository`] is used to schedule jobs to be executed by a +/// worker. +#[async_trait] +pub trait QueueWorkerRepository: Send + Sync { + /// The error type returned by the repository. + type Error; + + /// Register a new worker. + /// + /// Returns a reference to the worker. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn register( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + ) -> Result; + + /// Send a heartbeat for the given worker. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails or if the worker was + /// shutdown. + async fn heartbeat(&mut self, clock: &dyn Clock, worker: &Worker) -> Result<(), Self::Error>; + + /// Mark the given worker as shutdown. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn shutdown(&mut self, clock: &dyn Clock, worker: &Worker) -> Result<(), Self::Error>; + + /// Find dead workers and shut them down. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn shutdown_dead_workers( + &mut self, + clock: &dyn Clock, + threshold: Duration, + ) -> Result<(), Self::Error>; + + /// Remove the leader lease if it is expired, sending a notification to + /// trigger a new leader election. + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. + async fn remove_leader_lease_if_expired( + &mut self, + clock: &dyn Clock, + ) -> Result<(), Self::Error>; + + /// Try to get the leader lease, renewing it if we already have it + /// + /// Returns `true` if we got the leader lease, `false` if we didn't + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails. 
+ async fn try_get_leader_lease( + &mut self, + clock: &dyn Clock, + worker: &Worker, + ) -> Result; +} + +repository_impl!(QueueWorkerRepository: + async fn register( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + ) -> Result; + + async fn heartbeat( + &mut self, + clock: &dyn Clock, + worker: &Worker, + ) -> Result<(), Self::Error>; + + async fn shutdown( + &mut self, + clock: &dyn Clock, + worker: &Worker, + ) -> Result<(), Self::Error>; + + async fn shutdown_dead_workers( + &mut self, + clock: &dyn Clock, + threshold: Duration, + ) -> Result<(), Self::Error>; + + async fn remove_leader_lease_if_expired( + &mut self, + clock: &dyn Clock, + ) -> Result<(), Self::Error>; + + async fn try_get_leader_lease( + &mut self, + clock: &dyn Clock, + worker: &Worker, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/repository.rs b/matrix-authentication-service/crates/storage/src/repository.rs new file mode 100644 index 00000000..f6eb191e --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/repository.rs @@ -0,0 +1,674 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use futures_util::future::BoxFuture; +use thiserror::Error; + +use crate::{ + app_session::AppSessionRepository, + compat::{ + CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository, + CompatSsoLoginRepository, + }, + oauth2::{ + OAuth2AccessTokenRepository, OAuth2AuthorizationGrantRepository, OAuth2ClientRepository, + OAuth2DeviceCodeGrantRepository, OAuth2RefreshTokenRepository, OAuth2SessionRepository, + }, + personal::{PersonalAccessTokenRepository, PersonalSessionRepository}, + policy_data::PolicyDataRepository, + queue::{QueueJobRepository, QueueScheduleRepository, QueueWorkerRepository}, + upstream_oauth2::{ + UpstreamOAuthLinkRepository, UpstreamOAuthProviderRepository, + UpstreamOAuthSessionRepository, + }, + user::{ + BrowserSessionRepository, UserEmailRepository, UserPasswordRepository, + UserRecoveryRepository, UserRegistrationRepository, UserRegistrationTokenRepository, + UserRepository, UserTermsRepository, + }, +}; + +/// A [`RepositoryFactory`] is a factory that can create a [`BoxRepository`] +// XXX(quenting): this could be generic over the repository type, but it's annoying to make it +// dyn-safe +#[async_trait] +pub trait RepositoryFactory { + /// Create a new [`BoxRepository`] + async fn create(&self) -> Result; +} + +/// A type-erased [`RepositoryFactory`] +pub type BoxRepositoryFactory = Box; + +/// A [`Repository`] helps interacting with the underlying storage backend. 
+pub trait Repository: + RepositoryAccess + RepositoryTransaction + Send +where + E: std::error::Error + Send + Sync + 'static, +{ +} + +/// An opaque, type-erased error +#[derive(Debug, Error)] +#[error(transparent)] +pub struct RepositoryError { + source: Box, +} + +impl RepositoryError { + /// Construct a [`RepositoryError`] from any error kind + pub fn from_error(value: E) -> Self + where + E: std::error::Error + Send + Sync + 'static, + { + Self { + source: Box::new(value), + } + } +} + +/// A type-erased [`Repository`] +pub type BoxRepository = Box + Send + Sync + 'static>; + +/// A [`RepositoryTransaction`] can be saved or cancelled, after a series +/// of operations. +pub trait RepositoryTransaction { + /// The error type used by the [`Self::save`] and [`Self::cancel`] functions + type Error; + + /// Commit the transaction + /// + /// # Errors + /// + /// Returns an error if the underlying storage backend failed to commit the + /// transaction. + fn save(self: Box) -> BoxFuture<'static, Result<(), Self::Error>>; + + /// Rollback the transaction + /// + /// # Errors + /// + /// Returns an error if the underlying storage backend failed to rollback + /// the transaction. + fn cancel(self: Box) -> BoxFuture<'static, Result<(), Self::Error>>; +} + +/// Access the various repositories the backend implements. +/// +/// All the methods return a boxed trait object, which can be used to access a +/// particular repository. The lifetime of the returned object is bound to the +/// lifetime of the whole repository, so that only one mutable reference to the +/// repository is used at a time. +/// +/// When adding a new repository, you should add a new method to this trait, and +/// update the implementations for [`crate::MapErr`] and [`Box`] below. +/// +/// Note: this used to have generic associated types to avoid boxing all the +/// repository traits, but that was removed because it made almost impossible to +/// box the trait object. 
This might be a shortcoming of the initial +/// implementation of generic associated types, and might be fixed in the +/// future. +pub trait RepositoryAccess: Send { + /// The backend-specific error type used by each repository. + type Error: std::error::Error + Send + Sync + 'static; + + /// Get an [`UpstreamOAuthLinkRepository`] + fn upstream_oauth_link<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`UpstreamOAuthProviderRepository`] + fn upstream_oauth_provider<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`UpstreamOAuthSessionRepository`] + fn upstream_oauth_session<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`UserRepository`] + fn user<'c>(&'c mut self) -> Box + 'c>; + + /// Get an [`UserEmailRepository`] + fn user_email<'c>(&'c mut self) -> Box + 'c>; + + /// Get an [`UserPasswordRepository`] + fn user_password<'c>(&'c mut self) + -> Box + 'c>; + + /// Get an [`UserRecoveryRepository`] + fn user_recovery<'c>(&'c mut self) + -> Box + 'c>; + + /// Get an [`UserRegistrationRepository`] + fn user_registration<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`UserRegistrationTokenRepository`] + fn user_registration_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`UserTermsRepository`] + fn user_terms<'c>(&'c mut self) -> Box + 'c>; + + /// Get a [`BrowserSessionRepository`] + fn browser_session<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`AppSessionRepository`] + fn app_session<'c>(&'c mut self) -> Box + 'c>; + + /// Get an [`OAuth2ClientRepository`] + fn oauth2_client<'c>(&'c mut self) + -> Box + 'c>; + + /// Get an [`OAuth2AuthorizationGrantRepository`] + fn oauth2_authorization_grant<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`OAuth2SessionRepository`] + fn oauth2_session<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`OAuth2AccessTokenRepository`] + fn oauth2_access_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`OAuth2RefreshTokenRepository`] + fn 
oauth2_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get an [`OAuth2DeviceCodeGrantRepository`] + fn oauth2_device_code_grant<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`CompatSessionRepository`] + fn compat_session<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`CompatSsoLoginRepository`] + fn compat_sso_login<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`CompatAccessTokenRepository`] + fn compat_access_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`CompatRefreshTokenRepository`] + fn compat_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`PersonalAccessTokenRepository`] + fn personal_access_token<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`PersonalSessionRepository`] + fn personal_session<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`QueueWorkerRepository`] + fn queue_worker<'c>(&'c mut self) -> Box + 'c>; + + /// Get a [`QueueJobRepository`] + fn queue_job<'c>(&'c mut self) -> Box + 'c>; + + /// Get a [`QueueScheduleRepository`] + fn queue_schedule<'c>( + &'c mut self, + ) -> Box + 'c>; + + /// Get a [`PolicyDataRepository`] + fn policy_data<'c>(&'c mut self) -> Box + 'c>; +} + +/// Implementations of the [`RepositoryAccess`], [`RepositoryTransaction`] and +/// [`Repository`] for the [`crate::MapErr`] wrapper and [`Box`] +mod impls { + use futures_util::{FutureExt, TryFutureExt, future::BoxFuture}; + + use super::RepositoryAccess; + use crate::{ + MapErr, Repository, RepositoryTransaction, + app_session::AppSessionRepository, + compat::{ + CompatAccessTokenRepository, CompatRefreshTokenRepository, CompatSessionRepository, + CompatSsoLoginRepository, + }, + oauth2::{ + OAuth2AccessTokenRepository, OAuth2AuthorizationGrantRepository, + OAuth2ClientRepository, OAuth2DeviceCodeGrantRepository, OAuth2RefreshTokenRepository, + OAuth2SessionRepository, + }, + personal::{PersonalAccessTokenRepository, PersonalSessionRepository}, + policy_data::PolicyDataRepository, + 
queue::{QueueJobRepository, QueueScheduleRepository, QueueWorkerRepository}, + upstream_oauth2::{ + UpstreamOAuthLinkRepository, UpstreamOAuthProviderRepository, + UpstreamOAuthSessionRepository, + }, + user::{ + BrowserSessionRepository, UserEmailRepository, UserPasswordRepository, + UserRegistrationRepository, UserRegistrationTokenRepository, UserRepository, + UserTermsRepository, + }, + }; + + // --- Repository --- + impl Repository for MapErr + where + R: Repository + RepositoryAccess + RepositoryTransaction, + F: FnMut(E1) -> E2 + Send + Sync + 'static, + E1: std::error::Error + Send + Sync + 'static, + E2: std::error::Error + Send + Sync + 'static, + { + } + + // --- RepositoryTransaction -- + impl RepositoryTransaction for MapErr + where + R: RepositoryTransaction, + R::Error: 'static, + F: FnMut(R::Error) -> E + Send + Sync + 'static, + E: std::error::Error, + { + type Error = E; + + fn save(self: Box) -> BoxFuture<'static, Result<(), Self::Error>> { + Box::new(self.inner).save().map_err(self.mapper).boxed() + } + + fn cancel(self: Box) -> BoxFuture<'static, Result<(), Self::Error>> { + Box::new(self.inner).cancel().map_err(self.mapper).boxed() + } + } + + // --- RepositoryAccess -- + impl RepositoryAccess for MapErr + where + R: RepositoryAccess, + R::Error: 'static, + F: FnMut(R::Error) -> E + Send + Sync + 'static, + E: std::error::Error + Send + Sync + 'static, + { + type Error = E; + + fn upstream_oauth_link<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.upstream_oauth_link(), + &mut self.mapper, + )) + } + + fn upstream_oauth_provider<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.upstream_oauth_provider(), + &mut self.mapper, + )) + } + + fn upstream_oauth_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.upstream_oauth_session(), + &mut self.mapper, + )) + } + + fn user<'c>(&'c mut self) -> Box + 'c> { + Box::new(MapErr::new(self.inner.user(), &mut 
self.mapper)) + } + + fn user_email<'c>(&'c mut self) -> Box + 'c> { + Box::new(MapErr::new(self.inner.user_email(), &mut self.mapper)) + } + + fn user_password<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.user_password(), &mut self.mapper)) + } + + fn user_recovery<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.user_recovery(), &mut self.mapper)) + } + + fn user_registration<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.user_registration(), + &mut self.mapper, + )) + } + + fn user_registration_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.user_registration_token(), + &mut self.mapper, + )) + } + + fn user_terms<'c>(&'c mut self) -> Box + 'c> { + Box::new(MapErr::new(self.inner.user_terms(), &mut self.mapper)) + } + + fn browser_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.browser_session(), &mut self.mapper)) + } + + fn app_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.app_session(), &mut self.mapper)) + } + + fn oauth2_client<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.oauth2_client(), &mut self.mapper)) + } + + fn oauth2_authorization_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.oauth2_authorization_grant(), + &mut self.mapper, + )) + } + + fn oauth2_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.oauth2_session(), &mut self.mapper)) + } + + fn oauth2_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.oauth2_access_token(), + &mut self.mapper, + )) + } + + fn oauth2_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.oauth2_refresh_token(), + &mut self.mapper, + )) + } + + fn oauth2_device_code_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + 
self.inner.oauth2_device_code_grant(), + &mut self.mapper, + )) + } + + fn compat_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.compat_session(), &mut self.mapper)) + } + + fn compat_sso_login<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.compat_sso_login(), &mut self.mapper)) + } + + fn compat_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.compat_access_token(), + &mut self.mapper, + )) + } + + fn compat_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.compat_refresh_token(), + &mut self.mapper, + )) + } + + fn personal_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new( + self.inner.personal_access_token(), + &mut self.mapper, + )) + } + + fn personal_session<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.personal_session(), &mut self.mapper)) + } + + fn queue_worker<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.queue_worker(), &mut self.mapper)) + } + + fn queue_job<'c>(&'c mut self) -> Box + 'c> { + Box::new(MapErr::new(self.inner.queue_job(), &mut self.mapper)) + } + + fn queue_schedule<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.queue_schedule(), &mut self.mapper)) + } + + fn policy_data<'c>( + &'c mut self, + ) -> Box + 'c> { + Box::new(MapErr::new(self.inner.policy_data(), &mut self.mapper)) + } + } + + impl RepositoryAccess for Box { + type Error = R::Error; + + fn upstream_oauth_link<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).upstream_oauth_link() + } + + fn upstream_oauth_provider<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).upstream_oauth_provider() + } + + fn upstream_oauth_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).upstream_oauth_session() + } + + fn user<'c>(&'c mut self) -> Box + 'c> { + (**self).user() + } + + fn user_email<'c>(&'c mut self) -> Box + 'c> { + 
(**self).user_email() + } + + fn user_password<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).user_password() + } + + fn user_recovery<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).user_recovery() + } + + fn user_registration<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).user_registration() + } + + fn user_registration_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).user_registration_token() + } + + fn user_terms<'c>(&'c mut self) -> Box + 'c> { + (**self).user_terms() + } + + fn browser_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).browser_session() + } + + fn app_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).app_session() + } + + fn oauth2_client<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_client() + } + + fn oauth2_authorization_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_authorization_grant() + } + + fn oauth2_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_session() + } + + fn oauth2_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_access_token() + } + + fn oauth2_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_refresh_token() + } + + fn oauth2_device_code_grant<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).oauth2_device_code_grant() + } + + fn compat_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).compat_session() + } + + fn compat_sso_login<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).compat_sso_login() + } + + fn compat_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).compat_access_token() + } + + fn compat_refresh_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).compat_refresh_token() + } + + fn personal_access_token<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).personal_access_token() + } + + fn personal_session<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).personal_session() + } + + fn queue_worker<'c>( + &'c mut self, + ) -> Box + 'c> { + 
(**self).queue_worker() + } + + fn queue_job<'c>(&'c mut self) -> Box + 'c> { + (**self).queue_job() + } + + fn queue_schedule<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).queue_schedule() + } + + fn policy_data<'c>( + &'c mut self, + ) -> Box + 'c> { + (**self).policy_data() + } + } +} diff --git a/matrix-authentication-service/crates/storage/src/upstream_oauth2/link.rs b/matrix-authentication-service/crates/storage/src/upstream_oauth2/link.rs new file mode 100644 index 00000000..27e3ca67 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/upstream_oauth2/link.rs @@ -0,0 +1,286 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{Clock, UpstreamOAuthLink, UpstreamOAuthProvider, User}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Pagination, pagination::Page, repository_impl}; + +/// Filter parameters for listing upstream OAuth links +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UpstreamOAuthLinkFilter<'a> { + // XXX: we might also want to filter for links without a user linked to them + user: Option<&'a User>, + provider: Option<&'a UpstreamOAuthProvider>, + provider_enabled: Option, + subject: Option<&'a str>, +} + +impl<'a> UpstreamOAuthLinkFilter<'a> { + /// Create a new [`UpstreamOAuthLinkFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the user who owns the upstream OAuth links + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn user(&self) -> Option<&User> { + self.user + } + + /// Set the upstream OAuth 
provider for which to list links + #[must_use] + pub fn for_provider(mut self, provider: &'a UpstreamOAuthProvider) -> Self { + self.provider = Some(provider); + self + } + + /// Get the upstream OAuth provider filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn provider(&self) -> Option<&UpstreamOAuthProvider> { + self.provider + } + + /// Set whether to filter for enabled providers + #[must_use] + pub const fn enabled_providers_only(mut self) -> Self { + self.provider_enabled = Some(true); + self + } + + /// Set whether to filter for disabled providers + #[must_use] + pub const fn disabled_providers_only(mut self) -> Self { + self.provider_enabled = Some(false); + self + } + + /// Get the provider enabled filter + #[must_use] + pub const fn provider_enabled(&self) -> Option { + self.provider_enabled + } + + /// Set the subject filter + #[must_use] + pub const fn for_subject(mut self, subject: &'a str) -> Self { + self.subject = Some(subject); + self + } + + /// Get the subject filter + #[must_use] + pub const fn subject(&self) -> Option<&str> { + self.subject + } +} + +/// An [`UpstreamOAuthLinkRepository`] helps interacting with +/// [`UpstreamOAuthLink`] with the storage backend +#[async_trait] +pub trait UpstreamOAuthLinkRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an upstream OAuth link by its ID + /// + /// Returns `None` if the link does not exist + /// + /// # Parameters + /// + /// * `id`: The ID of the upstream OAuth link to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find an upstream OAuth link for a provider by its subject + /// + /// Returns `None` if no matching upstream OAuth link was found + /// + /// # Parameters + /// + /// * `upstream_oauth_provider`: The upstream OAuth provider on which to + /// find the link + /// * `subject`: The 
subject of the upstream OAuth link to find + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_subject( + &mut self, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: &str, + ) -> Result, Self::Error>; + + /// Add a new upstream OAuth link + /// + /// Returns the newly created upstream OAuth link + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `upsream_oauth_provider`: The upstream OAuth provider for which to + /// create the link + /// * `subject`: The subject of the upstream OAuth link to create + /// * `human_account_name`: A human-readable name for the upstream account + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: String, + human_account_name: Option, + ) -> Result; + + /// Associate an upstream OAuth link to a user + /// + /// Returns the updated upstream OAuth link + /// + /// # Parameters + /// + /// * `upstream_oauth_link`: The upstream OAuth link to update + /// * `user`: The user to associate to the upstream OAuth link + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn associate_to_user( + &mut self, + upstream_oauth_link: &UpstreamOAuthLink, + user: &User, + ) -> Result<(), Self::Error>; + + /// List [`UpstreamOAuthLink`] with the given filter and pagination + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UpstreamOAuthLinkFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`UpstreamOAuthLink`] with the given 
filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: UpstreamOAuthLinkFilter<'_>) -> Result; + + /// Delete a [`UpstreamOAuthLink`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `upstream_oauth_link`: The [`UpstreamOAuthLink`] to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn remove( + &mut self, + clock: &dyn Clock, + upstream_oauth_link: UpstreamOAuthLink, + ) -> Result<(), Self::Error>; + + /// Cleanup orphaned upstream OAuth links + /// + /// This will delete orphaned links (where `user_id IS NULL`) with IDs up to + /// and including `until`. Uses ULID cursor-based pagination for efficiency. + /// + /// Returns the number of links deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of links to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(UpstreamOAuthLinkRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn find_by_subject( + &mut self, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: &str, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + subject: String, + human_account_name: Option, + ) -> Result; + + async fn associate_to_user( + &mut self, + upstream_oauth_link: &UpstreamOAuthLink, + user: 
&User, + ) -> Result<(), Self::Error>; + + async fn list( + &mut self, + filter: UpstreamOAuthLinkFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: UpstreamOAuthLinkFilter<'_>) -> Result; + + async fn remove(&mut self, clock: &dyn Clock, upstream_oauth_link: UpstreamOAuthLink) -> Result<(), Self::Error>; + + async fn cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/upstream_oauth2/mod.rs b/matrix-authentication-service/crates/storage/src/upstream_oauth2/mod.rs new file mode 100644 index 00000000..39fefffe --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/upstream_oauth2/mod.rs @@ -0,0 +1,20 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Repositories to interact with entities related to the upstream OAuth 2.0 +//! providers + +mod link; +mod provider; +mod session; + +pub use self::{ + link::{UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository}, + provider::{ + UpstreamOAuthProviderFilter, UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository, + }, + session::{UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository}, +}; diff --git a/matrix-authentication-service/crates/storage/src/upstream_oauth2/provider.rs b/matrix-authentication-service/crates/storage/src/upstream_oauth2/provider.rs new file mode 100644 index 00000000..256a7496 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/upstream_oauth2/provider.rs @@ -0,0 +1,328 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::marker::PhantomData; + +use async_trait::async_trait; +use mas_data_model::{ + Clock, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, + UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderResponseMode, + UpstreamOAuthProviderTokenAuthMethod, +}; +use mas_iana::jose::JsonWebSignatureAlg; +use oauth2_types::scope::Scope; +use rand_core::RngCore; +use ulid::Ulid; +use url::Url; + +use crate::{Pagination, pagination::Page, repository_impl}; + +/// Structure which holds parameters when inserting or updating an upstream +/// OAuth 2.0 provider +pub struct UpstreamOAuthProviderParams { + /// The OIDC issuer of the provider + pub issuer: Option, + + /// A human-readable name for the provider + pub human_name: Option, + + /// A brand identifier, e.g. "apple" or "google" + pub brand_name: Option, + + /// The scope to request during the authorization flow + pub scope: Scope, + + /// The token endpoint authentication method + pub token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod, + + /// The JWT signing algorithm to use when then `client_secret_jwt` or + /// `private_key_jwt` authentication methods are used + pub token_endpoint_signing_alg: Option, + + /// Expected signature for the JWT payload returned by the token + /// authentication endpoint. + /// + /// Defaults to `RS256`. + pub id_token_signed_response_alg: JsonWebSignatureAlg, + + /// Whether to fetch the user profile from the userinfo endpoint, + /// or to rely on the data returned in the `id_token` from the + /// `token_endpoint`. + pub fetch_userinfo: bool, + + /// Expected signature for the JWT payload returned by the userinfo + /// endpoint. + /// + /// If not specified, the response is expected to be an unsigned JSON + /// payload. 
Defaults to `None`. + pub userinfo_signed_response_alg: Option, + + /// The client ID to use when authenticating to the upstream + pub client_id: String, + + /// The encrypted client secret to use when authenticating to the upstream + pub encrypted_client_secret: Option, + + /// How claims should be imported from the upstream provider + pub claims_imports: UpstreamOAuthProviderClaimsImports, + + /// The URL to use as the authorization endpoint. If `None`, the URL will be + /// discovered + pub authorization_endpoint_override: Option, + + /// The URL to use as the token endpoint. If `None`, the URL will be + /// discovered + pub token_endpoint_override: Option, + + /// The URL to use as the userinfo endpoint. If `None`, the URL will be + /// discovered + pub userinfo_endpoint_override: Option, + + /// The URL to use when fetching JWKS. If `None`, the URL will be discovered + pub jwks_uri_override: Option, + + /// How the provider metadata should be discovered + pub discovery_mode: UpstreamOAuthProviderDiscoveryMode, + + /// How should PKCE be used + pub pkce_mode: UpstreamOAuthProviderPkceMode, + + /// What response mode it should ask + pub response_mode: Option, + + /// Additional parameters to include in the authorization request + pub additional_authorization_parameters: Vec<(String, String)>, + + /// Whether to forward the login hint to the upstream provider. 
+ pub forward_login_hint: bool, + + /// The position of the provider in the UI + pub ui_order: i32, + + /// The behavior when receiving a backchannel logout notification + pub on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout, +} + +/// Filter parameters for listing upstream OAuth 2.0 providers +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UpstreamOAuthProviderFilter<'a> { + /// Filter by whether the provider is enabled + /// + /// If `None`, all providers are returned + enabled: Option, + + _lifetime: PhantomData<&'a ()>, +} + +impl UpstreamOAuthProviderFilter<'_> { + /// Create a new [`UpstreamOAuthProviderFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Return only enabled providers + #[must_use] + pub const fn enabled_only(mut self) -> Self { + self.enabled = Some(true); + self + } + + /// Return only disabled providers + #[must_use] + pub const fn disabled_only(mut self) -> Self { + self.enabled = Some(false); + self + } + + /// Get the enabled filter + /// + /// Returns `None` if the filter is not set + #[must_use] + pub const fn enabled(&self) -> Option { + self.enabled + } +} + +/// An [`UpstreamOAuthProviderRepository`] helps interacting with +/// [`UpstreamOAuthProvider`] saved in the storage backend +#[async_trait] +pub trait UpstreamOAuthProviderRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an upstream OAuth provider by its ID + /// + /// Returns `None` if the provider was not found + /// + /// # Parameters + /// + /// * `id`: The ID of the provider to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Add a new upstream OAuth provider + /// + /// Returns the newly created provider + /// + /// # Parameters + /// + /// * `rng`: A random number generator + /// * `clock`: The clock used to generate 
timestamps + /// * `params`: The parameters of the provider to add + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: UpstreamOAuthProviderParams, + ) -> Result; + + /// Delete an upstream OAuth provider + /// + /// # Parameters + /// + /// * `provider`: The provider to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn delete(&mut self, provider: UpstreamOAuthProvider) -> Result<(), Self::Error> { + self.delete_by_id(provider.id).await + } + + /// Delete an upstream OAuth provider by its ID + /// + /// # Parameters + /// + /// * `id`: The ID of the provider to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error>; + + /// Insert or update an upstream OAuth provider + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `id`: The ID of the provider to update + /// * `params`: The parameters of the provider to update + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn upsert( + &mut self, + clock: &dyn Clock, + id: Ulid, + params: UpstreamOAuthProviderParams, + ) -> Result; + + /// Disable an upstream OAuth provider + /// + /// Returns the disabled provider + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `provider`: The provider to disable + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn disable( + &mut self, + clock: &dyn Clock, + provider: UpstreamOAuthProvider, + ) -> Result; + + /// List [`UpstreamOAuthProvider`] with the given filter and pagination + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// 
+ /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UpstreamOAuthProviderFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`UpstreamOAuthProvider`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count( + &mut self, + filter: UpstreamOAuthProviderFilter<'_>, + ) -> Result; + + /// Get all enabled upstream OAuth providers + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn all_enabled(&mut self) -> Result, Self::Error>; +} + +repository_impl!(UpstreamOAuthProviderRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + params: UpstreamOAuthProviderParams + ) -> Result; + + async fn upsert( + &mut self, + clock: &dyn Clock, + id: Ulid, + params: UpstreamOAuthProviderParams + ) -> Result; + + async fn delete(&mut self, provider: UpstreamOAuthProvider) -> Result<(), Self::Error>; + + async fn delete_by_id(&mut self, id: Ulid) -> Result<(), Self::Error>; + + async fn disable( + &mut self, + clock: &dyn Clock, + provider: UpstreamOAuthProvider + ) -> Result; + + async fn list( + &mut self, + filter: UpstreamOAuthProviderFilter<'_>, + pagination: Pagination + ) -> Result, Self::Error>; + + async fn count( + &mut self, + filter: UpstreamOAuthProviderFilter<'_> + ) -> Result; + + async fn all_enabled(&mut self) -> Result, Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/upstream_oauth2/session.rs b/matrix-authentication-service/crates/storage/src/upstream_oauth2/session.rs new file mode 100644 index 00000000..6e017ca4 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/upstream_oauth2/session.rs @@ -0,0 +1,288 
@@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{ + BrowserSession, Clock, UpstreamOAuthAuthorizationSession, UpstreamOAuthLink, + UpstreamOAuthProvider, +}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Pagination, pagination::Page, repository_impl}; + +/// Filter parameters for listing upstream OAuth sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UpstreamOAuthSessionFilter<'a> { + provider: Option<&'a UpstreamOAuthProvider>, + sub_claim: Option<&'a str>, + sid_claim: Option<&'a str>, +} + +impl<'a> UpstreamOAuthSessionFilter<'a> { + /// Create a new [`UpstreamOAuthSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the upstream OAuth provider for which to list sessions + #[must_use] + pub fn for_provider(mut self, provider: &'a UpstreamOAuthProvider) -> Self { + self.provider = Some(provider); + self + } + + /// Get the upstream OAuth provider filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn provider(&self) -> Option<&UpstreamOAuthProvider> { + self.provider + } + + /// Set the `sub` claim to filter by + #[must_use] + pub fn with_sub_claim(mut self, sub_claim: &'a str) -> Self { + self.sub_claim = Some(sub_claim); + self + } + + /// Get the `sub` claim filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn sub_claim(&self) -> Option<&str> { + self.sub_claim + } + + /// Set the `sid` claim to filter by + #[must_use] + pub fn with_sid_claim(mut self, sid_claim: &'a str) -> Self { + self.sid_claim = Some(sid_claim); + self + } + + /// Get the `sid` claim filter + /// + /// Returns [`None`] if no filter was set + 
#[must_use] + pub fn sid_claim(&self) -> Option<&str> { + self.sid_claim + } +} + +/// An [`UpstreamOAuthSessionRepository`] helps interacting with +/// [`UpstreamOAuthAuthorizationSession`] saved in the storage backend +#[async_trait] +pub trait UpstreamOAuthSessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a session by its ID + /// + /// Returns `None` if the session does not exist + /// + /// # Parameters + /// + /// * `id`: the ID of the session to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup( + &mut self, + id: Ulid, + ) -> Result, Self::Error>; + + /// Add a session to the database + /// + /// Returns the newly created session + /// + /// # Parameters + /// + /// * `rng`: the random number generator to use + /// * `clock`: the clock source + /// * `upstream_oauth_provider`: the upstream OAuth provider for which to + /// create the session + /// * `state`: the authorization grant `state` parameter sent to the + /// upstream OAuth provider + /// * `code_challenge_verifier`: the code challenge verifier used in this + /// session, if PKCE is being used + /// * `nonce`: the `nonce` used in this session if in OIDC mode + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + state: String, + code_challenge_verifier: Option, + nonce: Option, + ) -> Result; + + /// Mark a session as completed and associate the given link + /// + /// Returns the updated session + /// + /// # Parameters + /// + /// * `clock`: the clock source + /// * `upstream_oauth_authorization_session`: the session to update + /// * `upstream_oauth_link`: the link to associate with the session + /// * `id_token`: the ID token returned by the upstream OAuth provider, if + /// present + /// * 
`id_token_claims`: the claims contained in the ID token, if present + /// * `extra_callback_parameters`: the extra query parameters returned in + /// the callback, if any + /// * `userinfo`: the user info returned by the upstream OAuth provider, if + /// requested + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + #[expect(clippy::too_many_arguments)] + async fn complete_with_link( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + upstream_oauth_link: &UpstreamOAuthLink, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + ) -> Result; + + /// Mark a session as consumed + /// + /// Returns the updated session + /// + /// # Parameters + /// + /// * `clock`: the clock source + /// * `upstream_oauth_authorization_session`: the session to consume + /// * `browser_session`: the browser session that was authenticated with + /// this authorization session + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn consume( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + browser_session: &BrowserSession, + ) -> Result; + + /// List [`UpstreamOAuthAuthorizationSession`] with the given filter and + /// pagination + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`UpstreamOAuthAuthorizationSession`] with the given + /// filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut 
self, filter: UpstreamOAuthSessionFilter<'_>) + -> Result; + + /// Cleanup old authorization sessions that are not linked to a user session + /// + /// This will delete sessions with IDs up to and including `until`. + /// Authorization sessions with a user session linked must be kept around to + /// avoid breaking features like OIDC Backchannel Logout. + /// + /// Returns the number of sessions deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of sessions to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(UpstreamOAuthSessionRepository: + async fn lookup( + &mut self, + id: Ulid, + ) -> Result, Self::Error>; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + upstream_oauth_provider: &UpstreamOAuthProvider, + state: String, + code_challenge_verifier: Option, + nonce: Option, + ) -> Result; + + async fn complete_with_link( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + upstream_oauth_link: &UpstreamOAuthLink, + id_token: Option, + id_token_claims: Option, + extra_callback_parameters: Option, + userinfo: Option, + ) -> Result; + + async fn consume( + &mut self, + clock: &dyn Clock, + upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, + browser_session: &BrowserSession, + ) -> Result; + + async fn list( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: UpstreamOAuthSessionFilter<'_>) -> Result; + + async fn 
cleanup_orphaned( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/email.rs b/matrix-authentication-service/crates/storage/src/user/email.rs new file mode 100644 index 00000000..eadf3dfb --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/email.rs @@ -0,0 +1,446 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{ + BrowserSession, Clock, UpstreamOAuthAuthorizationSession, User, UserEmail, + UserEmailAuthentication, UserEmailAuthenticationCode, UserRegistration, +}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Pagination, pagination::Page, repository_impl}; + +/// Filter parameters for listing user emails +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UserEmailFilter<'a> { + user: Option<&'a User>, + email: Option<&'a str>, +} + +impl<'a> UserEmailFilter<'a> { + /// Create a new [`UserEmailFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Filter for emails of a specific user + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Filter for emails matching a specific email address + /// + /// The email address is case-insensitive + #[must_use] + pub fn for_email(mut self, email: &'a str) -> Self { + self.email = Some(email); + self + } + + /// Get the user filter + /// + /// Returns [`None`] if no user filter is set + #[must_use] + pub fn user(&self) -> Option<&User> { + self.user + } + + /// Get the email filter + /// + /// Returns [`None`] if no email filter is set + #[must_use] + pub 
fn email(&self) -> Option<&str> { + self.email + } +} + +/// A [`UserEmailRepository`] helps interacting with [`UserEmail`] saved in the +/// storage backend +#[async_trait] +pub trait UserEmailRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an [`UserEmail`] by its ID + /// + /// Returns `None` if no [`UserEmail`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`UserEmail`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Lookup an [`UserEmail`] by its email address for a [`User`] + /// + /// The email address is case-insensitive + /// + /// Returns `None` if no matching [`UserEmail`] was found + /// + /// # Parameters + /// + /// * `user`: The [`User`] for whom to lookup the [`UserEmail`] + /// * `email`: The email address to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find(&mut self, user: &User, email: &str) -> Result, Self::Error>; + + /// Lookup an [`UserEmail`] by its email address + /// + /// The email address is case-insensitive + /// + /// Returns `None` if no matching [`UserEmail`] was found or if multiple + /// [`UserEmail`] are found + /// + /// # Parameters + /// * `email`: The email address to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_email(&mut self, email: &str) -> Result, Self::Error>; + + /// Get all [`UserEmail`] of a [`User`] + /// + /// # Parameters + /// + /// * `user`: The [`User`] for whom to lookup the [`UserEmail`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn all(&mut self, user: &User) -> Result, Self::Error>; + + /// List [`UserEmail`] with the given filter and pagination + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + 
/// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UserEmailFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the [`UserEmail`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: UserEmailFilter<'_>) -> Result; + + /// Create a new [`UserEmail`] for a [`User`] + /// + /// Returns the newly created [`UserEmail`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `user`: The [`User`] for whom to create the [`UserEmail`] + /// * `email`: The email address of the [`UserEmail`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + email: String, + ) -> Result; + + /// Delete a [`UserEmail`] + /// + /// # Parameters + /// + /// * `user_email`: The [`UserEmail`] to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn remove(&mut self, user_email: UserEmail) -> Result<(), Self::Error>; + + /// Delete all [`UserEmail`] with the given filter + /// + /// Returns the number of deleted [`UserEmail`]s + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn remove_bulk(&mut self, filter: UserEmailFilter<'_>) -> Result; + + /// Add a new [`UserEmailAuthentication`] for a [`BrowserSession`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `email`: The email address to add + /// * `session`: The 
[`BrowserSession`] for which to add the + /// [`UserEmailAuthentication`] + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn add_authentication_for_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + session: &BrowserSession, + ) -> Result; + + /// Add a new [`UserEmailAuthentication`] for a [`UserRegistration`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `email`: The email address to add + /// * `registration`: The [`UserRegistration`] for which to add the + /// [`UserEmailAuthentication`] + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn add_authentication_for_registration( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + registration: &UserRegistration, + ) -> Result; + + /// Add a new [`UserEmailAuthenticationCode`] for a + /// [`UserEmailAuthentication`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `duration`: The duration for which the code is valid + /// * `authentication`: The [`UserEmailAuthentication`] for which to add the + /// [`UserEmailAuthenticationCode`] + /// * `code`: The code to add + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails or if the code + /// already exists for this session + async fn add_authentication_code( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + duration: chrono::Duration, + authentication: &UserEmailAuthentication, + code: String, + ) -> Result; + + /// Lookup a [`UserEmailAuthentication`] + /// + /// # Parameters + /// + /// * `id`: The ID of the [`UserEmailAuthentication`] to lookup + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn lookup_authentication( + &mut self, + id: Ulid, + ) -> Result, 
Self::Error>; + + /// Find a [`UserEmailAuthenticationCode`] by its code and session + /// + /// # Parameters + /// + /// * `authentication`: The [`UserEmailAuthentication`] to find the code for + /// * `code`: The code of the [`UserEmailAuthentication`] to lookup + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn find_authentication_code( + &mut self, + authentication: &UserEmailAuthentication, + code: &str, + ) -> Result, Self::Error>; + + /// Complete a [`UserEmailAuthentication`] by using the given code + /// + /// Returns the completed [`UserEmailAuthentication`] + /// + /// # Parameters + /// + /// * `clock`: The clock to use to generate timestamps + /// * `authentication`: The [`UserEmailAuthentication`] to complete + /// * `code`: The [`UserEmailAuthenticationCode`] to use + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn complete_authentication_with_code( + &mut self, + clock: &dyn Clock, + authentication: UserEmailAuthentication, + code: &UserEmailAuthenticationCode, + ) -> Result; + + /// Complete a [`UserEmailAuthentication`] by using the given upstream oauth + /// authorization session + /// + /// Returns the completed [`UserEmailAuthentication`] + /// + /// # Parameters + /// + /// * `clock`: The clock to use to generate timestamps + /// * `authentication`: The [`UserEmailAuthentication`] to complete + /// * `upstream_oauth_authorization_session`: The + /// [`UpstreamOAuthAuthorizationSession`] to use + /// + /// # Errors + /// + /// Returns an error if the underlying repository fails + async fn complete_authentication_with_upstream( + &mut self, + clock: &dyn Clock, + authentication: UserEmailAuthentication, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + + /// Cleanup old email authentications + /// + /// This will delete email authentications with IDs up to and including + /// `until`. 
Uses ULID cursor-based pagination for efficiency. + /// Authentication codes will cascade-delete automatically. + /// + /// Returns the number of authentications deleted and the cursor for the + /// next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of authentications to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_authentications( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(UserEmailRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + async fn find(&mut self, user: &User, email: &str) -> Result, Self::Error>; + async fn find_by_email(&mut self, email: &str) -> Result, Self::Error>; + + async fn all(&mut self, user: &User) -> Result, Self::Error>; + async fn list( + &mut self, + filter: UserEmailFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + async fn count(&mut self, filter: UserEmailFilter<'_>) -> Result; + + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + email: String, + ) -> Result; + async fn remove(&mut self, user_email: UserEmail) -> Result<(), Self::Error>; + + async fn remove_bulk(&mut self, filter: UserEmailFilter<'_>) -> Result; + + async fn add_authentication_for_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + session: &BrowserSession, + ) -> Result; + + async fn add_authentication_for_registration( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + registration: &UserRegistration, + ) -> Result; + + async fn add_authentication_code( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + duration: 
chrono::Duration, + authentication: &UserEmailAuthentication, + code: String, + ) -> Result; + + async fn lookup_authentication( + &mut self, + id: Ulid, + ) -> Result, Self::Error>; + + async fn find_authentication_code( + &mut self, + authentication: &UserEmailAuthentication, + code: &str, + ) -> Result, Self::Error>; + + async fn complete_authentication_with_code( + &mut self, + clock: &dyn Clock, + authentication: UserEmailAuthentication, + code: &UserEmailAuthenticationCode, + ) -> Result; + + async fn complete_authentication_with_upstream( + &mut self, + clock: &dyn Clock, + authentication: UserEmailAuthentication, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + + async fn cleanup_authentications( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/mod.rs b/matrix-authentication-service/crates/storage/src/user/mod.rs new file mode 100644 index 00000000..025170fa --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/mod.rs @@ -0,0 +1,402 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Repositories to interact with entities related to user accounts + +use async_trait::async_trait; +use mas_data_model::{Clock, User}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{Page, Pagination, repository_impl}; + +mod email; +mod password; +mod recovery; +mod registration; +mod registration_token; +mod session; +mod terms; + +pub use self::{ + email::{UserEmailFilter, UserEmailRepository}, + password::UserPasswordRepository, + recovery::UserRecoveryRepository, + registration::UserRegistrationRepository, + registration_token::{UserRegistrationTokenFilter, UserRegistrationTokenRepository}, + session::{BrowserSessionFilter, BrowserSessionRepository}, + terms::UserTermsRepository, +}; + +/// The state of a user account +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum UserState { + /// The account is deactivated, it has the `deactivated_at` timestamp set + Deactivated, + + /// The account is locked, it has the `locked_at` timestamp set + Locked, + + /// The account is active + Active, +} + +impl UserState { + /// Returns `true` if the user state is [`Locked`]. + /// + /// [`Locked`]: UserState::Locked + #[must_use] + pub fn is_locked(&self) -> bool { + matches!(self, Self::Locked) + } + + /// Returns `true` if the user state is [`Deactivated`]. + /// + /// [`Deactivated`]: UserState::Deactivated + #[must_use] + pub fn is_deactivated(&self) -> bool { + matches!(self, Self::Deactivated) + } + + /// Returns `true` if the user state is [`Active`]. 
+ /// + /// [`Active`]: UserState::Active + #[must_use] + pub fn is_active(&self) -> bool { + matches!(self, Self::Active) + } +} + +/// Filter parameters for listing users +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UserFilter<'a> { + state: Option, + can_request_admin: Option, + is_guest: Option, + search: Option<&'a str>, +} + +impl<'a> UserFilter<'a> { + /// Create a new [`UserFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Filter for active users + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(UserState::Active); + self + } + + /// Filter for locked users + #[must_use] + pub fn locked_only(mut self) -> Self { + self.state = Some(UserState::Locked); + self + } + + /// Filter for deactivated users + #[must_use] + pub fn deactivated_only(mut self) -> Self { + self.state = Some(UserState::Deactivated); + self + } + + /// Filter for users that can request admin privileges + #[must_use] + pub fn can_request_admin_only(mut self) -> Self { + self.can_request_admin = Some(true); + self + } + + /// Filter for users that can't request admin privileges + #[must_use] + pub fn cannot_request_admin_only(mut self) -> Self { + self.can_request_admin = Some(false); + self + } + + /// Filter for guest users + #[must_use] + pub fn guest_only(mut self) -> Self { + self.is_guest = Some(true); + self + } + + /// Filter for non-guest users + #[must_use] + pub fn non_guest_only(mut self) -> Self { + self.is_guest = Some(false); + self + } + + /// Filter for users that match the given search string + #[must_use] + pub fn matching_search(mut self, search: &'a str) -> Self { + self.search = Some(search); + self + } + + /// Get the state filter + /// + /// Returns [`None`] if no state filter was set + #[must_use] + pub fn state(&self) -> Option { + self.state + } + + /// Get the can request admin filter + /// + /// Returns [`None`] if no can request admin filter was set + #[must_use] + pub 
fn can_request_admin(&self) -> Option { + self.can_request_admin + } + + /// Get the is guest filter + /// + /// Returns [`None`] if no is guest filter was set + #[must_use] + pub fn is_guest(&self) -> Option { + self.is_guest + } + + /// Get the search filter + /// + /// Returns [`None`] if no search filter was set + #[must_use] + pub fn search(&self) -> Option<&'a str> { + self.search + } +} + +/// A [`UserRepository`] helps interacting with [`User`] saved in the storage +/// backend +#[async_trait] +pub trait UserRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a [`User`] by its ID + /// + /// Returns `None` if no [`User`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`User`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Find a [`User`] by its username, in a case-insensitive manner + /// + /// Returns `None` if no [`User`] was found + /// + /// # Parameters + /// + /// * `username`: The username of the [`User`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_username(&mut self, username: &str) -> Result, Self::Error>; + + /// Create a new [`User`] + /// + /// Returns the newly created [`User`] + /// + /// # Parameters + /// + /// * `rng`: A random number generator to generate the [`User`] ID + /// * `clock`: The clock used to generate timestamps + /// * `username`: The username of the [`User`] + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: String, + ) -> Result; + + /// Check if a [`User`] exists + /// + /// Returns `true` if the [`User`] exists, `false` otherwise + /// + /// # Parameters + /// + /// * `username`: The username of the [`User`] to 
lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn exists(&mut self, username: &str) -> Result; + + /// Lock a [`User`] + /// + /// Returns the locked [`User`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `user`: The [`User`] to lock + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lock(&mut self, clock: &dyn Clock, user: User) -> Result; + + /// Unlock a [`User`] + /// + /// Returns the unlocked [`User`] + /// + /// # Parameters + /// + /// * `user`: The [`User`] to unlock + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn unlock(&mut self, user: User) -> Result; + + /// Deactivate a [`User`] + /// + /// Returns the deactivated [`User`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `user`: The [`User`] to deactivate + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn deactivate(&mut self, clock: &dyn Clock, user: User) -> Result; + + /// Reactivate a [`User`] + /// + /// Returns the reactivated [`User`] + /// + /// # Parameters + /// + /// * `user`: The [`User`] to reactivate + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn reactivate(&mut self, user: User) -> Result; + + /// Delete all the unsupported third-party IDs of a [`User`]. + /// + /// Those were imported by syn2mas and kept in case we wanted to support + /// them later. They still need to be cleaned up when a user deactivates + /// their account. + /// + /// Returns the number of deleted third-party IDs. 
+ /// + /// # Parameters + /// + /// * `user`: The [`User`] whose unsupported third-party IDs should be + /// deleted + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn delete_unsupported_threepids(&mut self, user: &User) -> Result; + + /// Set whether a [`User`] can request admin + /// + /// Returns the [`User`] with the new `can_request_admin` value + /// + /// # Parameters + /// + /// * `user`: The [`User`] to update + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn set_can_request_admin( + &mut self, + user: User, + can_request_admin: bool, + ) -> Result; + + /// List [`User`] with the given filter and pagination + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UserFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the [`User`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: UserFilter<'_>) -> Result; + + /// Acquire a lock on the user to make sure device operations are done in a + /// sequential way. The lock is released when the repository is saved or + /// rolled back. 
+ /// + /// # Parameters + /// + /// * `user`: The user to lock + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn acquire_lock_for_sync(&mut self, user: &User) -> Result<(), Self::Error>; +} + +repository_impl!(UserRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + async fn find_by_username(&mut self, username: &str) -> Result, Self::Error>; + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: String, + ) -> Result; + async fn exists(&mut self, username: &str) -> Result; + async fn lock(&mut self, clock: &dyn Clock, user: User) -> Result; + async fn unlock(&mut self, user: User) -> Result; + async fn deactivate(&mut self, clock: &dyn Clock, user: User) -> Result; + async fn reactivate(&mut self, user: User) -> Result; + async fn delete_unsupported_threepids(&mut self, user: &User) -> Result; + async fn set_can_request_admin( + &mut self, + user: User, + can_request_admin: bool, + ) -> Result; + async fn list( + &mut self, + filter: UserFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + async fn count(&mut self, filter: UserFilter<'_>) -> Result; + async fn acquire_lock_for_sync(&mut self, user: &User) -> Result<(), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/password.rs b/matrix-authentication-service/crates/storage/src/user/password.rs new file mode 100644 index 00000000..fff0298e --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/password.rs @@ -0,0 +1,71 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use mas_data_model::{Clock, Password, User}; +use rand_core::RngCore; + +use crate::repository_impl; + +/// A [`UserPasswordRepository`] helps interacting with [`Password`] saved in +/// the storage backend +#[async_trait] +pub trait UserPasswordRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Get the active password for a user + /// + /// Returns `None` if the user has no password set + /// + /// # Parameters + /// + /// * `user`: The user to get the password for + /// + /// # Errors + /// + /// Returns [`Self::Error`] if underlying repository fails + async fn active(&mut self, user: &User) -> Result, Self::Error>; + + /// Set a new password for a user + /// + /// Returns the newly created [`Password`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `user`: The user to set the password for + /// * `version`: The version of the hashing scheme used + /// * `hashed_password`: The hashed password + /// * `upgraded_from`: The password this password was upgraded from, if any + /// + /// # Errors + /// + /// Returns [`Self::Error`] if underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + version: u16, + hashed_password: String, + upgraded_from: Option<&Password>, + ) -> Result; +} + +repository_impl!(UserPasswordRepository: + async fn active(&mut self, user: &User) -> Result, Self::Error>; + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + version: u16, + hashed_password: String, + upgraded_from: Option<&Password>, + ) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/user/recovery.rs b/matrix-authentication-service/crates/storage/src/user/recovery.rs new file mode 100644 index 00000000..47c6bef5 --- /dev/null +++ 
b/matrix-authentication-service/crates/storage/src/user/recovery.rs @@ -0,0 +1,192 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use mas_data_model::{Clock, UserEmail, UserRecoverySession, UserRecoveryTicket}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// A [`UserRecoveryRepository`] helps interacting with [`UserRecoverySession`] +/// and [`UserRecoveryTicket`] saved in the storage backend +#[async_trait] +pub trait UserRecoveryRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup an [`UserRecoverySession`] by its ID + /// + /// Returns `None` if no [`UserRecoverySession`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`UserRecoverySession`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup_session( + &mut self, + id: Ulid, + ) -> Result, Self::Error>; + + /// Create a new [`UserRecoverySession`] for the given email + /// + /// Returns the newly created [`UserRecoverySession`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `email`: The email to create the session for + /// * `user_agent`: The user agent of the browser which initiated the + /// session + /// * `ip_address`: The IP address of the browser which initiated the + /// session, if known + /// * `locale`: The locale of the browser which initiated the session + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + 
user_agent: String, + ip_address: Option, + locale: String, + ) -> Result; + + /// Find a [`UserRecoveryTicket`] by its ticket + /// + /// Returns `None` if no [`UserRecoveryTicket`] was found + /// + /// # Parameters + /// + /// * `ticket`: The ticket of the [`UserRecoveryTicket`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_ticket( + &mut self, + ticket: &str, + ) -> Result, Self::Error>; + + /// Add a [`UserRecoveryTicket`] to the given [`UserRecoverySession`] for + /// the given [`UserEmail`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock to use + /// * `session`: The [`UserRecoverySession`] to add the ticket to + /// * `user_email`: The [`UserEmail`] to add the ticket for + /// * `ticket`: The ticket to add + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add_ticket( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_recovery_session: &UserRecoverySession, + user_email: &UserEmail, + ticket: String, + ) -> Result; + + /// Consume a [`UserRecoveryTicket`] and mark the session as used + /// + /// # Parameters + /// + /// * `clock`: The clock to use to record the time of consumption + /// * `ticket`: The [`UserRecoveryTicket`] to consume + /// * `session`: The [`UserRecoverySession`] to mark as used + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// recovery session was already used + async fn consume_ticket( + &mut self, + clock: &dyn Clock, + user_recovery_ticket: UserRecoveryTicket, + user_recovery_session: UserRecoverySession, + ) -> Result; + + /// Cleanup old recovery sessions + /// + /// This will delete recovery sessions with IDs up to and including `until`. + /// Uses ULID cursor-based pagination for efficiency. + /// Tickets will cascade-delete automatically. 
+ /// + /// Returns the number of sessions deleted and the cursor for the next batch + /// + /// # Parameters + /// + /// * `since`: The cursor to start from (exclusive), or `None` to start from + /// the beginning + /// * `until`: The maximum ULID to delete (inclusive upper bound) + /// * `limit`: The maximum number of sessions to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(UserRecoveryRepository: + async fn lookup_session(&mut self, id: Ulid) -> Result, Self::Error>; + + async fn add_session( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + email: String, + user_agent: String, + ip_address: Option, + locale: String, + ) -> Result; + + async fn find_ticket( + &mut self, + ticket: &str, + ) -> Result, Self::Error>; + + async fn add_ticket( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_recovery_session: &UserRecoverySession, + user_email: &UserEmail, + ticket: String, + ) -> Result; + + async fn consume_ticket( + &mut self, + clock: &dyn Clock, + user_recovery_ticket: UserRecoveryTicket, + user_recovery_session: UserRecoverySession, + ) -> Result; + + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/registration.rs b/matrix-authentication-service/crates/storage/src/user/registration.rs new file mode 100644 index 00000000..dc2a73f9 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/registration.rs @@ -0,0 +1,279 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use mas_data_model::{ + Clock, UpstreamOAuthAuthorizationSession, UserEmailAuthentication, UserRegistration, + UserRegistrationToken, +}; +use rand_core::RngCore; +use ulid::Ulid; +use url::Url; + +use crate::repository_impl; + +/// A [`UserRegistrationRepository`] helps interacting with [`UserRegistration`] +/// saved in the storage backend +#[async_trait] +pub trait UserRegistrationRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a [`UserRegistration`] by its ID + /// + /// Returns `None` if no [`UserRegistration`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`UserRegistration`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Create a new [`UserRegistration`] session + /// + /// Returns the newly created [`UserRegistration`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `username`: The username of the user + /// * `ip_address`: The IP address of the user agent, if any + /// * `user_agent`: The user agent of the user agent, if any + /// * `post_auth_action`: The post auth action to execute after the + /// registration, if any + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: String, + ip_address: Option, + user_agent: Option, + post_auth_action: Option, + ) -> Result; + + /// Set the display name of a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * 
`user_registration`: The [`UserRegistration`] to update + /// * `display_name`: The display name to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_display_name( + &mut self, + user_registration: UserRegistration, + display_name: String, + ) -> Result; + + /// Set the terms URL of a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `user_registration`: The [`UserRegistration`] to update + /// * `terms_url`: The terms URL to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_terms_url( + &mut self, + user_registration: UserRegistration, + terms_url: Url, + ) -> Result; + + /// Set the email authentication code of a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `user_registration`: The [`UserRegistration`] to update + /// * `email_authentication`: The [`UserEmailAuthentication`] to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_email_authentication( + &mut self, + user_registration: UserRegistration, + email_authentication: &UserEmailAuthentication, + ) -> Result; + + /// Set the password of a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `user_registration`: The [`UserRegistration`] to update + /// * `hashed_password`: The hashed password to set + /// * `version`: The version of the hashing scheme + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_password( + &mut self, + user_registration: UserRegistration, + hashed_password: String, + 
version: u16, + ) -> Result; + + /// Set the registration token of a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `user_registration`: The [`UserRegistration`] to update + /// * `user_registration_token`: The [`UserRegistrationToken`] to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_registration_token( + &mut self, + user_registration: UserRegistration, + user_registration_token: &UserRegistrationToken, + ) -> Result; + + /// Set an [`UpstreamOAuthAuthorizationSession`] to associate with a + /// [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `user_registration`: The [`UserRegistration`] to update + /// * `upstream_oauth_authorization_session`: The + /// [`UpstreamOAuthAuthorizationSession`] to set + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn set_upstream_oauth_authorization_session( + &mut self, + user_registration: UserRegistration, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + + /// Complete a [`UserRegistration`] + /// + /// Returns the updated [`UserRegistration`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `user_registration`: The [`UserRegistration`] to complete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails or if the + /// registration is already completed + async fn complete( + &mut self, + clock: &dyn Clock, + user_registration: UserRegistration, + ) -> Result; + + /// Cleanup [`UserRegistration`]s between the given IDs. + /// + /// Returns the number of registrations deleted, as well as the ID of the + /// last registration deleted. 
+ /// + /// # Parameters + /// + /// * `since`: An optional ID to start from + /// * `until`: The ID until which to clean up registrations + /// * `limit`: The maximum number of registrations to clean up + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +} + +repository_impl!(UserRegistrationRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + username: String, + ip_address: Option, + user_agent: Option, + post_auth_action: Option, + ) -> Result; + async fn set_display_name( + &mut self, + user_registration: UserRegistration, + display_name: String, + ) -> Result; + async fn set_terms_url( + &mut self, + user_registration: UserRegistration, + terms_url: Url, + ) -> Result; + async fn set_email_authentication( + &mut self, + user_registration: UserRegistration, + email_authentication: &UserEmailAuthentication, + ) -> Result; + async fn set_password( + &mut self, + user_registration: UserRegistration, + hashed_password: String, + version: u16, + ) -> Result; + async fn set_registration_token( + &mut self, + user_registration: UserRegistration, + user_registration_token: &UserRegistrationToken, + ) -> Result; + async fn set_upstream_oauth_authorization_session( + &mut self, + user_registration: UserRegistration, + upstream_oauth_authorization_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + async fn complete( + &mut self, + clock: &dyn Clock, + user_registration: UserRegistration, + ) -> Result; + async fn cleanup( + &mut self, + since: Option, + until: Ulid, + limit: usize, + ) -> Result<(usize, Option), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/registration_token.rs 
b/matrix-authentication-service/crates/storage/src/user/registration_token.rs new file mode 100644 index 00000000..1fb98550 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/registration_token.rs @@ -0,0 +1,319 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{Clock, UserRegistrationToken}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::repository_impl; + +/// A filter to apply when listing [`UserRegistrationToken`]s +#[derive(Debug, Clone, Copy)] +pub struct UserRegistrationTokenFilter { + now: DateTime, + has_been_used: Option, + is_revoked: Option, + is_expired: Option, + is_valid: Option, +} + +impl UserRegistrationTokenFilter { + /// Create a new empty filter + #[must_use] + pub fn new(now: DateTime) -> Self { + Self { + now, + has_been_used: None, + is_revoked: None, + is_expired: None, + is_valid: None, + } + } + + /// Filter by whether the token has been used at least once + #[must_use] + pub fn with_been_used(mut self, has_been_used: bool) -> Self { + self.has_been_used = Some(has_been_used); + self + } + + /// Filter by revoked status + #[must_use] + pub fn with_revoked(mut self, is_revoked: bool) -> Self { + self.is_revoked = Some(is_revoked); + self + } + + /// Filter by expired status + #[must_use] + pub fn with_expired(mut self, is_expired: bool) -> Self { + self.is_expired = Some(is_expired); + self + } + + /// Filter by valid status (meaning: not expired, not revoked, and still + /// with uses left) + #[must_use] + pub fn with_valid(mut self, is_valid: bool) -> Self { + self.is_valid = Some(is_valid); + self + } + + /// Get the used status filter + /// + /// Returns [`None`] if no used status filter was set + #[must_use] + pub fn has_been_used(&self) -> Option { + self.has_been_used + } + + /// Get 
the revoked status filter + /// + /// Returns [`None`] if no revoked status filter was set + #[must_use] + pub fn is_revoked(&self) -> Option { + self.is_revoked + } + + /// Get the expired status filter + /// + /// Returns [`None`] if no expired status filter was set + #[must_use] + pub fn is_expired(&self) -> Option { + self.is_expired + } + + /// Get the valid status filter + /// + /// Returns [`None`] if no valid status filter was set + #[must_use] + pub fn is_valid(&self) -> Option { + self.is_valid + } + + /// Get the current time for this filter evaluation + #[must_use] + pub fn now(&self) -> DateTime { + self.now + } +} + +/// A [`UserRegistrationTokenRepository`] helps interacting with +/// [`UserRegistrationToken`] saved in the storage backend +#[async_trait] +pub trait UserRegistrationTokenRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a [`UserRegistrationToken`] by its ID + /// + /// Returns `None` if no [`UserRegistrationToken`] was found + /// + /// # Parameters + /// + /// * `id`: The ID of the [`UserRegistrationToken`] to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Lookup a [`UserRegistrationToken`] by its token string + /// + /// Returns `None` if no [`UserRegistrationToken`] was found + /// + /// # Parameters + /// + /// * `token`: The token string to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn find_by_token( + &mut self, + token: &str, + ) -> Result, Self::Error>; + + /// Create a new [`UserRegistrationToken`] + /// + /// Returns the newly created [`UserRegistrationToken`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `token`: The token string + /// * `usage_limit`: Optional limit on how many times the 
token can be used + /// * `expires_at`: Optional expiration time for the token + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + token: String, + usage_limit: Option, + expires_at: Option>, + ) -> Result; + + /// Increment the usage count of a [`UserRegistrationToken`] + /// + /// Returns the updated [`UserRegistrationToken`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `token`: The [`UserRegistrationToken`] to update + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn use_token( + &mut self, + clock: &dyn Clock, + token: UserRegistrationToken, + ) -> Result; + + /// Revoke a [`UserRegistrationToken`] + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `token`: The [`UserRegistrationToken`] to delete + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn revoke( + &mut self, + clock: &dyn Clock, + token: UserRegistrationToken, + ) -> Result; + + /// Unrevoke a previously revoked [`UserRegistrationToken`] + /// + /// # Parameters + /// + /// * `token`: The [`UserRegistrationToken`] to unrevoke + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn unrevoke( + &mut self, + token: UserRegistrationToken, + ) -> Result; + + /// Set the expiration time of a [`UserRegistrationToken`] + /// + /// # Parameters + /// + /// * `token`: The [`UserRegistrationToken`] to update + /// * `expires_at`: The new expiration time, or `None` to remove the + /// expiration + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn set_expiry( + &mut self, + token: UserRegistrationToken, + expires_at: Option>, + ) -> Result; + + /// Set the usage limit of a [`UserRegistrationToken`] + 
/// + /// # Parameters + /// + /// * `token`: The [`UserRegistrationToken`] to update + /// * `usage_limit`: The new usage limit, or `None` to remove the limit + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn set_usage_limit( + &mut self, + token: UserRegistrationToken, + usage_limit: Option, + ) -> Result; + + /// List [`UserRegistrationToken`]s based on the provided filter + /// + /// Returns a list of matching [`UserRegistrationToken`]s + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UserRegistrationTokenFilter, + pagination: crate::Pagination, + ) -> Result, Self::Error>; + + /// Count [`UserRegistrationToken`]s based on the provided filter + /// + /// Returns the number of matching [`UserRegistrationToken`]s + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: UserRegistrationTokenFilter) -> Result; +} + +repository_impl!(UserRegistrationTokenRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + async fn find_by_token(&mut self, token: &str) -> Result, Self::Error>; + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + token: String, + usage_limit: Option, + expires_at: Option>, + ) -> Result; + async fn use_token( + &mut self, + clock: &dyn Clock, + token: UserRegistrationToken, + ) -> Result; + async fn revoke( + &mut self, + clock: &dyn Clock, + token: UserRegistrationToken, + ) -> Result; + async fn unrevoke( + &mut self, + token: UserRegistrationToken, + ) -> Result; + async fn set_expiry( + &mut self, + token: UserRegistrationToken, + expires_at: Option>, + ) -> Result; + async fn set_usage_limit( + &mut 
self, + token: UserRegistrationToken, + usage_limit: Option, + ) -> Result; + async fn list( + &mut self, + filter: UserRegistrationTokenFilter, + pagination: crate::Pagination, + ) -> Result, Self::Error>; + async fn count(&mut self, filter: UserRegistrationTokenFilter) -> Result; +); diff --git a/matrix-authentication-service/crates/storage/src/user/session.rs b/matrix-authentication-service/crates/storage/src/user/session.rs new file mode 100644 index 00000000..5e4a108e --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/session.rs @@ -0,0 +1,428 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::net::IpAddr; + +use async_trait::async_trait; +use chrono::{DateTime, Utc}; +use mas_data_model::{ + Authentication, BrowserSession, Clock, Password, UpstreamOAuthAuthorizationSession, User, +}; +use rand_core::RngCore; +use ulid::Ulid; + +use crate::{ + Pagination, pagination::Page, repository_impl, upstream_oauth2::UpstreamOAuthSessionFilter, +}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum BrowserSessionState { + Active, + Finished, +} + +impl BrowserSessionState { + pub fn is_active(self) -> bool { + matches!(self, Self::Active) + } + + pub fn is_finished(self) -> bool { + matches!(self, Self::Finished) + } +} + +/// Filter parameters for listing browser sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct BrowserSessionFilter<'a> { + user: Option<&'a User>, + state: Option, + last_active_before: Option>, + last_active_after: Option>, + linked_to_upstream_sessions: Option>, +} + +impl<'a> BrowserSessionFilter<'a> { + /// Create a new [`BrowserSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the 
user who owns the browser sessions + #[must_use] + pub fn for_user(mut self, user: &'a User) -> Self { + self.user = Some(user); + self + } + + /// Get the user filter + #[must_use] + pub fn user(&self) -> Option<&User> { + self.user + } + + /// Only return sessions with a last active time before the given time + #[must_use] + pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { + self.last_active_before = Some(last_active_before); + self + } + + /// Only return sessions with a last active time after the given time + #[must_use] + pub fn with_last_active_after(mut self, last_active_after: DateTime) -> Self { + self.last_active_after = Some(last_active_after); + self + } + + /// Get the last active before filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn last_active_before(&self) -> Option> { + self.last_active_before + } + + /// Get the last active after filter + /// + /// Returns [`None`] if no client filter was set + #[must_use] + pub fn last_active_after(&self) -> Option> { + self.last_active_after + } + + /// Only return active browser sessions + #[must_use] + pub fn active_only(mut self) -> Self { + self.state = Some(BrowserSessionState::Active); + self + } + + /// Only return finished browser sessions + #[must_use] + pub fn finished_only(mut self) -> Self { + self.state = Some(BrowserSessionState::Finished); + self + } + + /// Get the state filter + #[must_use] + pub fn state(&self) -> Option { + self.state + } + + /// Only return browser sessions linked to the given upstream OAuth sessions + #[must_use] + pub fn linked_to_upstream_sessions_only( + mut self, + filter: UpstreamOAuthSessionFilter<'a>, + ) -> Self { + self.linked_to_upstream_sessions = Some(filter); + self + } + + /// Get the upstream OAuth session filter + #[must_use] + pub fn linked_to_upstream_sessions(&self) -> Option> { + self.linked_to_upstream_sessions + } +} + +/// A [`BrowserSessionRepository`] helps interacting with 
[`BrowserSession`] +/// saved in the storage backend +#[async_trait] +pub trait BrowserSessionRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Lookup a [`BrowserSession`] by its ID + /// + /// Returns `None` if the session is not found + /// + /// # Parameters + /// + /// * `id`: The ID of the session to lookup + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + + /// Create a new [`BrowserSession`] for a [`User`] + /// + /// Returns the newly created [`BrowserSession`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `user`: The user to create the session for + /// * `user_agent`: If available, the user agent of the browser + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + user_agent: Option, + ) -> Result; + + /// Finish a [`BrowserSession`] + /// + /// Returns the finished session + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `user_session`: The session to finish + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish( + &mut self, + clock: &dyn Clock, + user_session: BrowserSession, + ) -> Result; + + /// Mark all the [`BrowserSession`] matching the given filter as finished + /// + /// Returns the number of sessions affected + /// + /// # Parameters + /// + /// * `clock`: The clock used to generate timestamps + /// * `filter`: The filter parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: BrowserSessionFilter<'_>, + ) -> Result; + + /// List [`BrowserSession`] 
with the given filter and pagination + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: BrowserSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`BrowserSession`] with the given filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: BrowserSessionFilter<'_>) -> Result; + + /// Authenticate a [`BrowserSession`] with the given [`Password`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `user_session`: The session to authenticate + /// * `user_password`: The password which was used to authenticate + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn authenticate_with_password( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + user_password: &Password, + ) -> Result; + + /// Authenticate a [`BrowserSession`] with the given + /// [`UpstreamOAuthAuthorizationSession`] + /// + /// # Parameters + /// + /// * `rng`: The random number generator to use + /// * `clock`: The clock used to generate timestamps + /// * `user_session`: The session to authenticate + /// * `upstream_oauth_session`: The upstream OAuth session which was used to + /// authenticate + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn authenticate_with_upstream( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + upstream_oauth_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + + /// Get the last successful 
authentication for a [`BrowserSession`] + /// + /// # Params + /// + /// * `user_session`: The session for which to get the last authentication + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn get_last_authentication( + &mut self, + user_session: &BrowserSession, + ) -> Result, Self::Error>; + + /// Record a batch of [`BrowserSession`] activity + /// + /// # Parameters + /// + /// * `activity`: A list of tuples containing the session ID, the last + /// activity timestamp and the IP address of the client + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + /// Cleanup finished [`BrowserSession`]s + /// + /// Deletes sessions finished between `since` and `until`, but only if they + /// have no child sessions (`compat_sessions` or `oauth2_sessions`). Returns + /// the number of deleted sessions and the timestamp of the last deleted + /// session for pagination. + /// + /// # Parameters + /// + /// * `since`: The earliest finish time to delete (exclusive). If `None`, + /// starts from the beginning. + /// * `until`: The latest finish time to delete (exclusive) + /// * `limit`: Maximum number of sessions to delete in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; + + /// Clear IP addresses from sessions inactive since the threshold + /// + /// Sets `last_active_ip` to `NULL` for sessions where `last_active_at` is + /// before the threshold. Returns the number of sessions affected and the + /// last `last_active_at` timestamp processed for pagination. 
+ /// + /// # Parameters + /// + /// * `since`: Only process sessions with `last_active_at` at or after this + /// timestamp (exclusive). If `None`, starts from the beginning. + /// * `threshold`: Clear IPs for sessions with `last_active_at` before this + /// time + /// * `limit`: Maximum number of sessions to update in this batch + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +} + +repository_impl!(BrowserSessionRepository: + async fn lookup(&mut self, id: Ulid) -> Result, Self::Error>; + async fn add( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + user_agent: Option, + ) -> Result; + async fn finish( + &mut self, + clock: &dyn Clock, + user_session: BrowserSession, + ) -> Result; + + async fn finish_bulk( + &mut self, + clock: &dyn Clock, + filter: BrowserSessionFilter<'_>, + ) -> Result; + + async fn list( + &mut self, + filter: BrowserSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: BrowserSessionFilter<'_>) -> Result; + + async fn authenticate_with_password( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + user_password: &Password, + ) -> Result; + + async fn authenticate_with_upstream( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user_session: &BrowserSession, + upstream_oauth_session: &UpstreamOAuthAuthorizationSession, + ) -> Result; + + async fn get_last_authentication( + &mut self, + user_session: &BrowserSession, + ) -> Result, Self::Error>; + + async fn record_batch_activity( + &mut self, + activity: Vec<(Ulid, DateTime, Option)>, + ) -> Result<(), Self::Error>; + + async fn cleanup_finished( + &mut self, + since: Option>, + until: DateTime, + limit: usize, + ) -> Result<(usize, 
Option>), Self::Error>; + + async fn cleanup_inactive_ips( + &mut self, + since: Option>, + threshold: DateTime, + limit: usize, + ) -> Result<(usize, Option>), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/user/terms.rs b/matrix-authentication-service/crates/storage/src/user/terms.rs new file mode 100644 index 00000000..15536097 --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/user/terms.rs @@ -0,0 +1,50 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use async_trait::async_trait; +use mas_data_model::{Clock, User}; +use rand_core::RngCore; +use url::Url; + +use crate::repository_impl; + +/// A [`UserTermsRepository`] helps interacting with the terms of service agreed +/// by a [`User`] +#[async_trait] +pub trait UserTermsRepository: Send + Sync { + /// The error type returned by the repository + type Error; + + /// Accept the terms of service by a [`User`] + /// + /// # Parameters + /// + /// * `rng`: A random number generator used to generate IDs + /// * `clock`: The clock used to generate timestamps + /// * `user`: The [`User`] accepting the terms + /// * `terms_url`: The URL of the terms of service the user is accepting + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn accept_terms( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + terms_url: Url, + ) -> Result<(), Self::Error>; +} + +repository_impl!(UserTermsRepository: + async fn accept_terms( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + user: &User, + terms_url: Url, + ) -> Result<(), Self::Error>; +); diff --git a/matrix-authentication-service/crates/storage/src/utils.rs b/matrix-authentication-service/crates/storage/src/utils.rs new file mode 
100644 index 00000000..a6f380fe --- /dev/null +++ b/matrix-authentication-service/crates/storage/src/utils.rs @@ -0,0 +1,71 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Wrappers and useful type aliases + +/// A wrapper which is used to map the error type of a repository to another +pub struct MapErr { + pub(crate) inner: R, + pub(crate) mapper: F, + _private: (), +} + +impl MapErr { + /// Create a new [`MapErr`] wrapper from an inner repository and a mapper + /// function + #[must_use] + pub fn new(inner: R, mapper: F) -> Self { + Self { + inner, + mapper, + _private: (), + } + } +} + +/// A macro to implement a repository trait for the [`MapErr`] wrapper and for +/// [`Box`] +#[macro_export] +macro_rules! repository_impl { + ($repo_trait:ident: + $( + async fn $method:ident ( + &mut self + $(, $arg:ident: $arg_ty:ty )* + $(,)? 
+ ) -> Result<$ret_ty:ty, Self::Error>; + )* + ) => { + #[::async_trait::async_trait] + impl $repo_trait for ::std::boxed::Box + where + R: $repo_trait, + { + type Error = ::Error; + + $( + async fn $method (&mut self $(, $arg: $arg_ty)*) -> Result<$ret_ty, Self::Error> { + (**self).$method ( $($arg),* ).await + } + )* + } + + #[::async_trait::async_trait] + impl $repo_trait for $crate::MapErr + where + R: $repo_trait, + F: FnMut(::Error) -> E + ::std::marker::Send + ::std::marker::Sync, + { + type Error = E; + + $( + async fn $method (&mut self $(, $arg: $arg_ty)*) -> Result<$ret_ty, Self::Error> { + self.inner.$method ( $($arg),* ).await.map_err(&mut self.mapper) + } + )* + } + }; +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-026adeffc646b41ebc096bb874d110039b9a4a0425fd566e401f56ea215de0dd.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-026adeffc646b41ebc096bb874d110039b9a4a0425fd566e401f56ea215de0dd.json new file mode 100644 index 00000000..fa5f442e --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-026adeffc646b41ebc096bb874d110039b9a4a0425fd566e401f56ea215de0dd.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__upstream_oauth_links\n (upstream_oauth_link_id, user_id, upstream_oauth_provider_id, subject, created_at)\n SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::UUID[], $4::TEXT[], $5::TIMESTAMP WITH TIME ZONE[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + "UuidArray", + "TextArray", + "TimestamptzArray" + ] + }, + "nullable": [] + }, + "hash": "026adeffc646b41ebc096bb874d110039b9a4a0425fd566e401f56ea215de0dd" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-07ec66733b67a9990cc9d483b564c8d05c577cf8f049d8822746c7d1dbd23752.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-07ec66733b67a9990cc9d483b564c8d05c577cf8f049d8822746c7d1dbd23752.json new file mode 100644 
index 00000000..c7f5fce5 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-07ec66733b67a9990cc9d483b564c8d05c577cf8f049d8822746c7d1dbd23752.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas_restore_indices (name, table_name, definition)\n VALUES ($1, $2, $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "07ec66733b67a9990cc9d483b564c8d05c577cf8f049d8822746c7d1dbd23752" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-09db58b250c20ab9d1701653165233e5c9aabfdae1f0ee9b77c909b2bb2f3e25.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-09db58b250c20ab9d1701653165233e5c9aabfdae1f0ee9b77c909b2bb2f3e25.json new file mode 100644 index 00000000..97e8a07a --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-09db58b250c20ab9d1701653165233e5c9aabfdae1f0ee9b77c909b2bb2f3e25.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__compat_sessions (\n compat_session_id, user_id,\n device_id, human_name,\n created_at, is_synapse_admin,\n last_active_at, last_active_ip,\n user_agent)\n SELECT * FROM UNNEST(\n $1::UUID[], $2::UUID[],\n $3::TEXT[], $4::TEXT[],\n $5::TIMESTAMP WITH TIME ZONE[], $6::BOOLEAN[],\n $7::TIMESTAMP WITH TIME ZONE[], $8::INET[],\n $9::TEXT[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + "TextArray", + "TextArray", + "TimestamptzArray", + "BoolArray", + "TimestamptzArray", + "InetArray", + "TextArray" + ] + }, + "nullable": [] + }, + "hash": "09db58b250c20ab9d1701653165233e5c9aabfdae1f0ee9b77c909b2bb2f3e25" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-12112011318abc0bdd7f722ed8c5d4a86bf5758f8c32d9d41a22999b2f0698ca.json 
b/matrix-authentication-service/crates/syn2mas/.sqlx/query-12112011318abc0bdd7f722ed8c5d4a86bf5758f8c32d9d41a22999b2f0698ca.json new file mode 100644 index 00000000..f1b8bad9 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-12112011318abc0bdd7f722ed8c5d4a86bf5758f8c32d9d41a22999b2f0698ca.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT conrelid::regclass::text AS \"table_name!\", conname AS \"name!\", pg_get_constraintdef(c.oid) AS \"definition!\"\n FROM pg_constraint c\n JOIN pg_namespace n ON n.oid = c.connamespace\n WHERE contype IN ('f', 'p', 'u') AND conrelid::regclass::text = $1\n AND n.nspname = current_schema;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "table_name!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 2, + "name": "definition!", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null, + false, + null + ] + }, + "hash": "12112011318abc0bdd7f722ed8c5d4a86bf5758f8c32d9d41a22999b2f0698ca" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-1d1004d0fb5939fbf30c1986b80b986b1b4864a778525d0b8b0ad6678aef3e9f.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-1d1004d0fb5939fbf30c1986b80b986b1b4864a778525d0b8b0ad6678aef3e9f.json new file mode 100644 index 00000000..c65dfb7a --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-1d1004d0fb5939fbf30c1986b80b986b1b4864a778525d0b8b0ad6678aef3e9f.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__compat_refresh_tokens (\n compat_refresh_token_id,\n compat_session_id,\n compat_access_token_id,\n refresh_token,\n created_at)\n SELECT * FROM UNNEST(\n $1::UUID[],\n $2::UUID[],\n $3::UUID[],\n $4::TEXT[],\n $5::TIMESTAMP WITH TIME ZONE[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + 
"UuidArray", + "TextArray", + "TimestamptzArray" + ] + }, + "nullable": [] + }, + "hash": "1d1004d0fb5939fbf30c1986b80b986b1b4864a778525d0b8b0ad6678aef3e9f" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-204cf4811150a7fdeafa9373647a9cd62ac3c9e58155882858c6056e2ef6c30d.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-204cf4811150a7fdeafa9373647a9cd62ac3c9e58155882858c6056e2ef6c30d.json new file mode 100644 index 00000000..464dd900 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-204cf4811150a7fdeafa9373647a9cd62ac3c9e58155882858c6056e2ef6c30d.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__user_unsupported_third_party_ids\n (user_id, medium, address, created_at)\n SELECT * FROM UNNEST($1::UUID[], $2::TEXT[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "TextArray", + "TextArray", + "TimestamptzArray" + ] + }, + "nullable": [] + }, + "hash": "204cf4811150a7fdeafa9373647a9cd62ac3c9e58155882858c6056e2ef6c30d" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-207b880ec2dd484ad05a7138ba485277958b66e4534561686c073e282fafaf2a.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-207b880ec2dd484ad05a7138ba485277958b66e4534561686c073e282fafaf2a.json new file mode 100644 index 00000000..79688d80 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-207b880ec2dd484ad05a7138ba485277958b66e4534561686c073e282fafaf2a.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__users (\n user_id, username,\n created_at, locked_at,\n deactivated_at,\n can_request_admin, is_guest)\n SELECT * FROM UNNEST(\n $1::UUID[], $2::TEXT[],\n $3::TIMESTAMP WITH TIME ZONE[], $4::TIMESTAMP WITH TIME ZONE[],\n $5::TIMESTAMP WITH TIME ZONE[],\n $6::BOOL[], $7::BOOL[])\n ", + "describe": { + "columns": [], + "parameters": { + 
"Left": [ + "UuidArray", + "TextArray", + "TimestamptzArray", + "TimestamptzArray", + "TimestamptzArray", + "BoolArray", + "BoolArray" + ] + }, + "nullable": [] + }, + "hash": "207b880ec2dd484ad05a7138ba485277958b66e4534561686c073e282fafaf2a" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-24f6ce6280dc6675ab1ebdde0c5e3db8ff7a686180d71052911879f186ed1c8e.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-24f6ce6280dc6675ab1ebdde0c5e3db8ff7a686180d71052911879f186ed1c8e.json new file mode 100644 index 00000000..d736336f --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-24f6ce6280dc6675ab1ebdde0c5e3db8ff7a686180d71052911879f186ed1c8e.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__user_passwords\n (user_password_id, user_id, hashed_password, created_at, version)\n SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[], $5::INTEGER[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + "TextArray", + "TimestamptzArray", + "Int4Array" + ] + }, + "nullable": [] + }, + "hash": "24f6ce6280dc6675ab1ebdde0c5e3db8ff7a686180d71052911879f186ed1c8e" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-486f3177dcf6117c6b966954a44d9f96a754eba64912566e81a90bd4cbd186f0.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-486f3177dcf6117c6b966954a44d9f96a754eba64912566e81a90bd4cbd186f0.json new file mode 100644 index 00000000..68b0722e --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-486f3177dcf6117c6b966954a44d9f96a754eba64912566e81a90bd4cbd186f0.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT indexname AS \"name!\", indexdef AS \"definition!\", schemaname AS \"table_name!\"\n FROM pg_indexes\n WHERE schemaname = current_schema AND tablename = $1 AND indexname IS NOT NULL AND indexdef IS NOT NULL\n ", + "describe": { 
+ "columns": [ + { + "ordinal": 0, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 1, + "name": "definition!", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "table_name!", + "type_info": "Name" + } + ], + "parameters": { + "Left": [ + "Name" + ] + }, + "nullable": [ + true, + true, + true + ] + }, + "hash": "486f3177dcf6117c6b966954a44d9f96a754eba64912566e81a90bd4cbd186f0" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-5b4840f42ae00c5dc9f59f2745d664b16ebd813dfa0aa32a6d39dd5c393af299.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-5b4840f42ae00c5dc9f59f2745d664b16ebd813dfa0aa32a6d39dd5c393af299.json new file mode 100644 index 00000000..3dcc1fc4 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-5b4840f42ae00c5dc9f59f2745d664b16ebd813dfa0aa32a6d39dd5c393af299.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT conrelid::regclass::text AS \"table_name!\", conname AS \"name!\", pg_get_constraintdef(c.oid) AS \"definition!\"\n FROM pg_constraint c\n JOIN pg_namespace n ON n.oid = c.connamespace\n WHERE contype = 'f' AND confrelid::regclass::text = $1\n AND n.nspname = current_schema;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "table_name!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 2, + "name": "definition!", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null, + false, + null + ] + }, + "hash": "5b4840f42ae00c5dc9f59f2745d664b16ebd813dfa0aa32a6d39dd5c393af299" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-69aa96208513c3ea64a446c7739747fcb5e79d7e8c1212b2a679c3bde908ce93.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-69aa96208513c3ea64a446c7739747fcb5e79d7e8c1212b2a679c3bde908ce93.json new file mode 100644 index 00000000..855da3ba --- /dev/null +++ 
b/matrix-authentication-service/crates/syn2mas/.sqlx/query-69aa96208513c3ea64a446c7739747fcb5e79d7e8c1212b2a679c3bde908ce93.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas_restore_constraints (name, table_name, definition)\n VALUES ($1, $2, $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "69aa96208513c3ea64a446c7739747fcb5e79d7e8c1212b2a679c3bde908ce93" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-78ed3bf1032cd678b42230d68fb2b8e3d74161c8b6c5fe1a746b6958ccd2fd84.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-78ed3bf1032cd678b42230d68fb2b8e3d74161c8b6c5fe1a746b6958ccd2fd84.json new file mode 100644 index 00000000..759cc5f8 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-78ed3bf1032cd678b42230d68fb2b8e3d74161c8b6c5fe1a746b6958ccd2fd84.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT table_name, name, definition FROM syn2mas_restore_constraints ORDER BY order_key", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "table_name", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "definition", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "78ed3bf1032cd678b42230d68fb2b8e3d74161c8b6c5fe1a746b6958ccd2fd84" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-86b2b02fbb6350100d794e4d0fa3c67bf00fd3e411f769b9f25dec27428489ed.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-86b2b02fbb6350100d794e4d0fa3c67bf00fd3e411f769b9f25dec27428489ed.json new file mode 100644 index 00000000..dd8a8e30 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-86b2b02fbb6350100d794e4d0fa3c67bf00fd3e411f769b9f25dec27428489ed.json @@ -0,0 +1,18 @@ +{ + 
"db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__compat_access_tokens (\n compat_access_token_id,\n compat_session_id,\n access_token,\n created_at,\n expires_at)\n SELECT * FROM UNNEST(\n $1::UUID[],\n $2::UUID[],\n $3::TEXT[],\n $4::TIMESTAMP WITH TIME ZONE[],\n $5::TIMESTAMP WITH TIME ZONE[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + "TextArray", + "TimestamptzArray", + "TimestamptzArray" + ] + }, + "nullable": [] + }, + "hash": "86b2b02fbb6350100d794e4d0fa3c67bf00fd3e411f769b9f25dec27428489ed" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-979bedd942b4f71c58f3672f2917cee05ac1a628e51fe61ba6dfed253e0c63c2.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-979bedd942b4f71c58f3672f2917cee05ac1a628e51fe61ba6dfed253e0c63c2.json new file mode 100644 index 00000000..9ae8f1e3 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-979bedd942b4f71c58f3672f2917cee05ac1a628e51fe61ba6dfed253e0c63c2.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT table_name, name, definition FROM syn2mas_restore_indices ORDER BY order_key", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "table_name", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "name", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "definition", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "979bedd942b4f71c58f3672f2917cee05ac1a628e51fe61ba6dfed253e0c63c2" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-b27828d7510d52456b50b4c4b9712878ee329ca72070d849eb61ac9c8f9d1c76.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-b27828d7510d52456b50b4c4b9712878ee329ca72070d849eb61ac9c8f9d1c76.json new file mode 100644 index 00000000..df1f3fb7 --- /dev/null +++ 
b/matrix-authentication-service/crates/syn2mas/.sqlx/query-b27828d7510d52456b50b4c4b9712878ee329ca72070d849eb61ac9c8f9d1c76.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT 1 AS _dummy FROM pg_tables WHERE schemaname = current_schema\n AND tablename = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "_dummy", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "NameArray" + ] + }, + "nullable": [ + null + ] + }, + "hash": "b27828d7510d52456b50b4c4b9712878ee329ca72070d849eb61ac9c8f9d1c76" +} diff --git a/matrix-authentication-service/crates/syn2mas/.sqlx/query-ebf68b70b3e22a04b57b5587b4b099255155193dafbbd185cd8f26d93ff423a7.json b/matrix-authentication-service/crates/syn2mas/.sqlx/query-ebf68b70b3e22a04b57b5587b4b099255155193dafbbd185cd8f26d93ff423a7.json new file mode 100644 index 00000000..12de563c --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/.sqlx/query-ebf68b70b3e22a04b57b5587b4b099255155193dafbbd185cd8f26d93ff423a7.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO syn2mas__user_emails\n (user_email_id, user_id, email, created_at)\n SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[])\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "UuidArray", + "UuidArray", + "TextArray", + "TimestamptzArray" + ] + }, + "nullable": [] + }, + "hash": "ebf68b70b3e22a04b57b5587b4b099255155193dafbbd185cd8f26d93ff423a7" +} diff --git a/matrix-authentication-service/crates/syn2mas/Cargo.toml b/matrix-authentication-service/crates/syn2mas/Cargo.toml new file mode 100644 index 00000000..ca278bee --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/Cargo.toml @@ -0,0 +1,55 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "syn2mas" +version.workspace = true +license.workspace = true +authors.workspace = true +edition.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +arc-swap.workspace = true +bitflags.workspace = true +camino.workspace = true +chrono.workspace = true +compact_str.workspace = true +figment.workspace = true +futures-util.workspace = true +mas-config.workspace = true +mas-iana.workspace = true +mas-storage.workspace = true +mas-data-model.workspace = true +oauth2-types.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +rand_chacha.workspace = true +rand.workspace = true +rustc-hash.workspace = true +serde_json.workspace = true +serde.workspace = true +sqlx.workspace = true +thiserror-ext.workspace = true +thiserror.workspace = true +tokio-util.workspace = true +tokio.workspace = true +tracing.workspace = true +ulid.workspace = true +url.workspace = true +uuid.workspace = true + +[dev-dependencies] +anyhow.workspace = true +insta.workspace = true +serde.workspace = true + +mas-storage-pg.workspace = true diff --git a/matrix-authentication-service/crates/syn2mas/src/lib.rs b/matrix-authentication-service/crates/syn2mas/src/lib.rs new file mode 100644 index 00000000..3593858d --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/lib.rs @@ -0,0 +1,27 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod mas_writer; +mod synapse_reader; + +mod migration; +mod progress; +mod telemetry; + +type RandomState = rustc_hash::FxBuildHasher; +type HashMap = rustc_hash::FxHashMap; + +pub use self::{ + mas_writer::{MasWriter, checks::mas_pre_migration_checks, locking::LockedMasDatabase}, + migration::migrate, + progress::{Progress, ProgressCounter, ProgressStage}, + synapse_reader::{ + SynapseReader, + checks::{ + synapse_config_check, synapse_config_check_against_mas_config, synapse_database_check, + }, + config as synapse_config, + }, +}; diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/checks.rs b/matrix-authentication-service/crates/syn2mas/src/mas_writer/checks.rs new file mode 100644 index 00000000..ae0964bf --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/checks.rs @@ -0,0 +1,81 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! # MAS Database Checks +//! +//! This module provides safety checks to run against a MAS database before +//! running the Synapse-to-MAS migration. + +use thiserror::Error; +use thiserror_ext::ContextInto; +use tracing::Instrument as _; + +use super::{MAS_TABLES_AFFECTED_BY_MIGRATION, is_syn2mas_in_progress, locking::LockedMasDatabase}; + +#[derive(Debug, Error, ContextInto)] +pub enum Error { + #[error( + "The MAS database is not empty: rows found in at least `{table}`. Please drop and recreate the database, then try again." 
+ )] + MasDatabaseNotEmpty { table: &'static str }, + + #[error("Query against {table} failed — is this actually a MAS database?")] + MaybeNotMas { + #[source] + source: sqlx::Error, + table: &'static str, + }, + + #[error(transparent)] + Sqlx(#[from] sqlx::Error), + + #[error("Unable to check if syn2mas is already in progress")] + UnableToCheckInProgress(#[source] super::Error), +} + +/// Check that a MAS database is ready for being migrated to. +/// +/// Concretely, this checks that the database is empty. +/// +/// If syn2mas is already in progress on this database, the checks are skipped. +/// +/// # Errors +/// +/// Errors are returned under the following circumstances: +/// +/// - If any database access error occurs. +/// - If any MAS tables involved in the migration are not empty. +/// - If we can't check whether syn2mas is already in progress on this database +/// or not. +#[tracing::instrument(name = "syn2mas.mas_pre_migration_checks", skip_all)] +pub async fn mas_pre_migration_checks(mas_connection: &mut LockedMasDatabase) -> Result<(), Error> { + if is_syn2mas_in_progress(mas_connection.as_mut()) + .await + .map_err(Error::UnableToCheckInProgress)? + { + // syn2mas already in progress, so we already performed the checks + return Ok(()); + } + + // Check that the database looks like a MAS database and that it is also an + // empty database. + + for &table in MAS_TABLES_AFFECTED_BY_MIGRATION { + let query = format!("SELECT 1 AS dummy FROM {table} LIMIT 1"); + let span = tracing::info_span!("db.query", db.query.text = query); + let row_present = sqlx::query(&query) + .fetch_optional(mas_connection.as_mut()) + .instrument(span) + .await + .into_maybe_not_mas(table)? 
+ .is_some(); + + if row_present { + return Err(Error::MasDatabaseNotEmpty { table }); + } + } + + Ok(()) +} diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/constraint_pausing.rs b/matrix-authentication-service/crates/syn2mas/src/mas_writer/constraint_pausing.rs new file mode 100644 index 00000000..3bfef602 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/constraint_pausing.rs @@ -0,0 +1,170 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::time::Instant; + +use sqlx::PgConnection; +use tracing::{debug, info}; + +use super::{Error, IntoDatabase}; + +/// Description of a constraint, which allows recreating it later. +pub struct ConstraintDescription { + pub name: String, + pub table_name: String, + pub definition: String, +} + +pub struct IndexDescription { + pub name: String, + pub table_name: String, + pub definition: String, +} + +/// Look up and return the definition of a constraint. +pub async fn describe_constraints_on_table( + conn: &mut PgConnection, + table_name: &str, +) -> Result, Error> { + sqlx::query_as!( + ConstraintDescription, + r#" + SELECT conrelid::regclass::text AS "table_name!", conname AS "name!", pg_get_constraintdef(c.oid) AS "definition!" + FROM pg_constraint c + JOIN pg_namespace n ON n.oid = c.connamespace + WHERE contype IN ('f', 'p', 'u') AND conrelid::regclass::text = $1 + AND n.nspname = current_schema; + "#, + table_name + ).fetch_all(&mut *conn).await.into_database_with(|| format!("could not read constraint definitions of {table_name}")) +} + +/// Look up and return the definitions of foreign-key constraints whose +/// target table is the one specified. 
+pub async fn describe_foreign_key_constraints_to_table( + conn: &mut PgConnection, + target_table_name: &str, +) -> Result, Error> { + sqlx::query_as!( + ConstraintDescription, + r#" + SELECT conrelid::regclass::text AS "table_name!", conname AS "name!", pg_get_constraintdef(c.oid) AS "definition!" + FROM pg_constraint c + JOIN pg_namespace n ON n.oid = c.connamespace + WHERE contype = 'f' AND confrelid::regclass::text = $1 + AND n.nspname = current_schema; + "#, + target_table_name + ).fetch_all(&mut *conn).await.into_database_with(|| format!("could not read FK constraint definitions targetting {target_table_name}")) +} + +/// Look up and return the definitions of all indices on a given table. +pub async fn describe_indices_on_table( + conn: &mut PgConnection, + table_name: &str, +) -> Result, Error> { + sqlx::query_as!( + IndexDescription, + r#" + SELECT indexname AS "name!", indexdef AS "definition!", schemaname AS "table_name!" + FROM pg_indexes + WHERE schemaname = current_schema AND tablename = $1 AND indexname IS NOT NULL AND indexdef IS NOT NULL + "#, + table_name + ).fetch_all(&mut *conn).await.into_database("cannot search for indices") +} + +/// Drops a constraint from the database. +/// +/// The constraint must exist prior to this call. +pub async fn drop_constraint( + conn: &mut PgConnection, + constraint: &ConstraintDescription, +) -> Result<(), Error> { + let name = &constraint.name; + let table_name = &constraint.table_name; + debug!("dropping constraint {name} on table {table_name}"); + sqlx::query(&format!("ALTER TABLE {table_name} DROP CONSTRAINT {name};")) + .execute(&mut *conn) + .await + .into_database_with(|| format!("failed to drop constraint {name} on {table_name}"))?; + + Ok(()) +} + +/// Drops an index from the database. +/// +/// The index must exist prior to this call. 
+pub async fn drop_index(conn: &mut PgConnection, index: &IndexDescription) -> Result<(), Error> { + let index_name = &index.name; + debug!("dropping index {index_name}"); + sqlx::query(&format!("DROP INDEX {index_name};")) + .execute(&mut *conn) + .await + .into_database_with(|| format!("failed to temporarily drop {index_name}"))?; + + Ok(()) +} + +/// Restores (recreates) a constraint. +/// +/// The constraint must not exist prior to this call. +#[tracing::instrument(name = "syn2mas.restore_constraint", skip_all, fields(constraint.name = constraint.name))] +pub async fn restore_constraint( + conn: &mut PgConnection, + constraint: &ConstraintDescription, +) -> Result<(), Error> { + let start = Instant::now(); + + let ConstraintDescription { + name, + table_name, + definition, + } = &constraint; + + sqlx::query(&format!( + "ALTER TABLE {table_name} ADD CONSTRAINT {name} {definition};" + )) + .execute(conn) + .await + .into_database_with(|| { + format!("failed to recreate constraint {name} on {table_name} with {definition}") + })?; + + info!( + "constraint {name} rebuilt in {:.1}s", + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok(()) +} + +/// Restores (recreates) a index. +/// +/// The index must not exist prior to this call. 
+#[tracing::instrument(name = "syn2mas.restore_index", skip_all, fields(index.name = index.name))] +pub async fn restore_index(conn: &mut PgConnection, index: &IndexDescription) -> Result<(), Error> { + let start = Instant::now(); + + let IndexDescription { + name, + table_name, + definition, + } = &index; + + sqlx::query(&format!("{definition};")) + .execute(conn) + .await + .into_database_with(|| { + format!("failed to recreate index {name} on {table_name} with {definition}") + })?; + + info!( + "index {name} rebuilt in {:.1}s", + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok(()) +} diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/fixtures/upstream_provider.sql b/matrix-authentication-service/crates/syn2mas/src/mas_writer/fixtures/upstream_provider.sql new file mode 100644 index 00000000..957cedcb --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/fixtures/upstream_provider.sql @@ -0,0 +1,21 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +INSERT INTO upstream_oauth_providers + ( + upstream_oauth_provider_id, + scope, + client_id, + token_endpoint_auth_method, + created_at + ) + VALUES + ( + '00000000-0000-0000-0000-000000000004', + 'openid', + 'someClientId', + 'client_secret_basic', + '2011-12-13 14:15:16Z' + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/locking.rs b/matrix-authentication-service/crates/syn2mas/src/mas_writer/locking.rs new file mode 100644 index 00000000..96fd2d30 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/locking.rs @@ -0,0 +1,60 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::sync::LazyLock; + +use sqlx::{ + Either, PgConnection, + postgres::{PgAdvisoryLock, PgAdvisoryLockGuard}, +}; + +static SYN2MAS_ADVISORY_LOCK: LazyLock = + LazyLock::new(|| PgAdvisoryLock::new("syn2mas-maswriter")); + +/// A wrapper around a Postgres connection which holds a session-wide advisory +/// lock preventing concurrent access by other syn2mas instances. +pub struct LockedMasDatabase { + inner: PgAdvisoryLockGuard<'static, PgConnection>, +} + +impl LockedMasDatabase { + /// Attempts to lock the MAS database against concurrent access by other + /// syn2mas instances. + /// + /// If the lock can be acquired, returns a `LockedMasDatabase`. + /// If the lock cannot be acquired, returns the connection back to the + /// caller wrapped in `Either::Right`. + /// + /// # Errors + /// + /// Errors are returned for underlying database errors. + pub async fn try_new( + mas_connection: PgConnection, + ) -> Result, sqlx::Error> { + SYN2MAS_ADVISORY_LOCK + .try_acquire(mas_connection) + .await + .map(|either| match either { + Either::Left(inner) => Either::Left(LockedMasDatabase { inner }), + Either::Right(unlocked) => Either::Right(unlocked), + }) + } + + /// Releases the advisory lock on the MAS database, returning the underlying + /// connection. + /// + /// # Errors + /// + /// Errors are returned for underlying database errors. + pub async fn unlock(self) -> Result { + self.inner.release_now().await + } +} + +impl AsMut for LockedMasDatabase { + fn as_mut(&mut self) -> &mut PgConnection { + self.inner.as_mut() + } +} diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/mod.rs b/matrix-authentication-service/crates/syn2mas/src/mas_writer/mod.rs new file mode 100644 index 00000000..bd42c14b --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/mod.rs @@ -0,0 +1,1716 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! # MAS Writer +//! +//! This module is responsible for writing new records to MAS' database. + +use std::{ + fmt::Display, + net::IpAddr, + sync::{ + Arc, + atomic::{AtomicU32, Ordering}, + }, +}; + +use chrono::{DateTime, Utc}; +use futures_util::{FutureExt, TryStreamExt, future::BoxFuture}; +use sqlx::{Executor, PgConnection, query, query_as}; +use thiserror::Error; +use thiserror_ext::{Construct, ContextInto}; +use tokio::sync::mpsc::{self, Receiver, Sender}; +use tracing::{Instrument, error, info, warn}; +use uuid::{NonNilUuid, Uuid}; + +use self::{ + constraint_pausing::{ConstraintDescription, IndexDescription}, + locking::LockedMasDatabase, +}; +use crate::Progress; + +pub mod checks; +pub mod locking; + +mod constraint_pausing; + +#[derive(Debug, Error, Construct, ContextInto)] +pub enum Error { + #[error("database error whilst {context}")] + Database { + #[source] + source: sqlx::Error, + context: String, + }, + + #[error("writer connection pool shut down due to error")] + #[expect(clippy::enum_variant_names)] + WriterConnectionPoolError, + + #[error("inconsistent database: {0}")] + Inconsistent(String), + + #[error("bug in syn2mas: write buffers not finished")] + WriteBuffersNotFinished, + + #[error("{0}")] + Multiple(MultipleErrors), +} + +#[derive(Debug)] +pub struct MultipleErrors { + errors: Vec, +} + +impl Display for MultipleErrors { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "multiple errors")?; + for error in &self.errors { + write!(f, "\n- {error}")?; + } + Ok(()) + } +} + +impl From> for MultipleErrors { + fn from(value: Vec) -> Self { + MultipleErrors { errors: value } + } +} + +struct WriterConnectionPool { + /// How many connections are in circulation + num_connections: usize, + + /// A receiver handle to get a writer connection + /// The writer 
connection will be mid-transaction! + connection_rx: Receiver>, + + /// A sender handle to return a writer connection to the pool + /// The connection should still be mid-transaction! + connection_tx: Sender>, +} + +impl WriterConnectionPool { + pub fn new(connections: Vec) -> Self { + let num_connections = connections.len(); + let (connection_tx, connection_rx) = mpsc::channel(num_connections); + for connection in connections { + connection_tx + .try_send(Ok(connection)) + .expect("there should be room for this connection"); + } + + WriterConnectionPool { + num_connections, + connection_rx, + connection_tx, + } + } + + pub async fn spawn_with_connection(&mut self, task: F) -> Result<(), Error> + where + F: for<'conn> FnOnce(&'conn mut PgConnection) -> BoxFuture<'conn, Result<(), Error>> + + Send + + 'static, + { + match self.connection_rx.recv().await { + Some(Ok(mut connection)) => { + let connection_tx = self.connection_tx.clone(); + tokio::task::spawn( + async move { + let to_return = match task(&mut connection).await { + Ok(()) => Ok(connection), + Err(error) => { + error!("error in writer: {error}"); + Err(error) + } + }; + // This should always succeed in sending unless we're already shutting + // down for some other reason. + let _: Result<_, _> = connection_tx.send(to_return).await; + } + .instrument(tracing::debug_span!("spawn_with_connection")), + ); + + Ok(()) + } + Some(Err(error)) => { + // This should always succeed in sending unless we're already shutting + // down for some other reason. + let _: Result<_, _> = self.connection_tx.send(Err(error)).await; + + Err(Error::WriterConnectionPoolError) + } + None => { + unreachable!("we still hold a reference to the sender, so this shouldn't happen") + } + } + } + + /// Finishes writing to the database, committing all changes. + /// + /// # Errors + /// + /// - If any errors were returned to the pool. + /// - If committing the changes failed. 
+ /// + /// # Panics + /// + /// - If connections were not returned to the pool. (This indicates a + /// serious bug.) + pub async fn finish(self) -> Result<(), Vec> { + let mut errors = Vec::new(); + + let Self { + num_connections, + mut connection_rx, + connection_tx, + } = self; + // Drop the sender handle so we gracefully allow the receiver to close + drop(connection_tx); + + let mut finished_connections = 0; + + while let Some(connection_or_error) = connection_rx.recv().await { + finished_connections += 1; + + match connection_or_error { + Ok(mut connection) => { + if let Err(err) = query("COMMIT;").execute(&mut connection).await { + errors.push(err.into_database("commit writer transaction")); + } + } + Err(error) => { + errors.push(error); + } + } + } + assert_eq!( + finished_connections, num_connections, + "syn2mas had a bug: connections went missing {finished_connections} != {num_connections}" + ); + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } + } +} + +/// Small utility to make sure `finish()` is called on all write buffers +/// before committing to the database. +#[derive(Default)] +struct FinishChecker { + counter: Arc, +} + +struct FinishCheckerHandle { + counter: Arc, +} + +impl FinishChecker { + /// Acquire a new handle, for a task that should declare when it has + /// finished. + pub fn handle(&self) -> FinishCheckerHandle { + self.counter.fetch_add(1, Ordering::SeqCst); + FinishCheckerHandle { + counter: Arc::clone(&self.counter), + } + } + + /// Check that all handles have been declared as finished. + pub fn check_all_finished(self) -> Result<(), Error> { + if self.counter.load(Ordering::SeqCst) == 0 { + Ok(()) + } else { + Err(Error::WriteBuffersNotFinished) + } + } +} + +impl FinishCheckerHandle { + /// Declare that the task this handle represents has been finished. 
+ pub fn declare_finished(self) { + self.counter.fetch_sub(1, Ordering::SeqCst); + } +} + +pub struct MasWriter { + conn: LockedMasDatabase, + writer_pool: WriterConnectionPool, + dry_run: bool, + + indices_to_restore: Vec, + constraints_to_restore: Vec, + + write_buffer_finish_checker: FinishChecker, +} + +pub trait WriteBatch: Send + Sync + Sized + 'static { + fn write_batch( + conn: &mut PgConnection, + batch: Vec, + ) -> impl Future> + Send; +} + +pub struct MasNewUser { + pub user_id: NonNilUuid, + pub username: String, + pub created_at: DateTime, + pub locked_at: Option>, + pub deactivated_at: Option>, + pub can_request_admin: bool, + /// Whether the user was a Synapse guest. + /// Although MAS doesn't support guest access, it's still useful to track + /// for the future. + pub is_guest: bool, +} + +impl WriteBatch for MasNewUser { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + // `UNNEST` is a fast way to do bulk inserts, as it lets us send multiple rows + // in one statement without having to change the statement + // SQL thus altering the query plan. See . + // In the future we could consider using sqlx's support for `PgCopyIn` / the + // `COPY FROM STDIN` statement, which is allegedly the best + // for insert performance, but is less simple to encode. 
+ let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut usernames: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + let mut locked_ats: Vec>> = Vec::with_capacity(batch.len()); + let mut deactivated_ats: Vec>> = Vec::with_capacity(batch.len()); + let mut can_request_admins: Vec = Vec::with_capacity(batch.len()); + let mut is_guests: Vec = Vec::with_capacity(batch.len()); + for MasNewUser { + user_id, + username, + created_at, + locked_at, + deactivated_at, + can_request_admin, + is_guest, + } in batch + { + user_ids.push(user_id.get()); + usernames.push(username); + created_ats.push(created_at); + locked_ats.push(locked_at); + deactivated_ats.push(deactivated_at); + can_request_admins.push(can_request_admin); + is_guests.push(is_guest); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__users ( + user_id, username, + created_at, locked_at, + deactivated_at, + can_request_admin, is_guest) + SELECT * FROM UNNEST( + $1::UUID[], $2::TEXT[], + $3::TIMESTAMP WITH TIME ZONE[], $4::TIMESTAMP WITH TIME ZONE[], + $5::TIMESTAMP WITH TIME ZONE[], + $6::BOOL[], $7::BOOL[]) + "#, + &user_ids[..], + &usernames[..], + &created_ats[..], + // We need to override the typing for arrays of optionals (sqlx limitation) + &locked_ats[..] as &[Option>], + &deactivated_ats[..] 
as &[Option>], + &can_request_admins[..], + &is_guests[..], + ) + .execute(&mut *conn) + .await + .into_database("writing users to MAS")?; + + Ok(()) + } +} + +pub struct MasNewUserPassword { + pub user_password_id: Uuid, + pub user_id: NonNilUuid, + pub hashed_password: String, + pub created_at: DateTime, +} + +impl WriteBatch for MasNewUserPassword { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut user_password_ids: Vec = Vec::with_capacity(batch.len()); + let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut hashed_passwords: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + let mut versions: Vec = Vec::with_capacity(batch.len()); + for MasNewUserPassword { + user_password_id, + user_id, + hashed_password, + created_at, + } in batch + { + user_password_ids.push(user_password_id); + user_ids.push(user_id.get()); + hashed_passwords.push(hashed_password); + created_ats.push(created_at); + versions.push(MIGRATED_PASSWORD_VERSION.into()); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__user_passwords + (user_password_id, user_id, hashed_password, created_at, version) + SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[], $5::INTEGER[]) + "#, + &user_password_ids[..], + &user_ids[..], + &hashed_passwords[..], + &created_ats[..], + &versions[..], + ).execute(&mut *conn).await.into_database("writing users to MAS")?; + + Ok(()) + } +} + +pub struct MasNewEmailThreepid { + pub user_email_id: Uuid, + pub user_id: NonNilUuid, + pub email: String, + pub created_at: DateTime, +} + +impl WriteBatch for MasNewEmailThreepid { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut user_email_ids: Vec = Vec::with_capacity(batch.len()); + let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut emails: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = 
Vec::with_capacity(batch.len()); + + for MasNewEmailThreepid { + user_email_id, + user_id, + email, + created_at, + } in batch + { + user_email_ids.push(user_email_id); + user_ids.push(user_id.get()); + emails.push(email); + created_ats.push(created_at); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__user_emails + (user_email_id, user_id, email, created_at) + SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[]) + "#, + &user_email_ids[..], + &user_ids[..], + &emails[..], + &created_ats[..], + ) + .execute(&mut *conn) + .await + .into_database("writing emails to MAS")?; + + Ok(()) + } +} + +pub struct MasNewUnsupportedThreepid { + pub user_id: NonNilUuid, + pub medium: String, + pub address: String, + pub created_at: DateTime, +} + +impl WriteBatch for MasNewUnsupportedThreepid { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut mediums: Vec = Vec::with_capacity(batch.len()); + let mut addresses: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + + for MasNewUnsupportedThreepid { + user_id, + medium, + address, + created_at, + } in batch + { + user_ids.push(user_id.get()); + mediums.push(medium); + addresses.push(address); + created_ats.push(created_at); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__user_unsupported_third_party_ids + (user_id, medium, address, created_at) + SELECT * FROM UNNEST($1::UUID[], $2::TEXT[], $3::TEXT[], $4::TIMESTAMP WITH TIME ZONE[]) + "#, + &user_ids[..], + &mediums[..], + &addresses[..], + &created_ats[..], + ) + .execute(&mut *conn) + .await + .into_database("writing unsupported threepids to MAS")?; + + Ok(()) + } +} + +pub struct MasNewUpstreamOauthLink { + pub link_id: Uuid, + pub user_id: NonNilUuid, + pub upstream_provider_id: Uuid, + pub subject: String, + pub created_at: DateTime, +} + +impl WriteBatch for MasNewUpstreamOauthLink { + 
async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut link_ids: Vec = Vec::with_capacity(batch.len()); + let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut upstream_provider_ids: Vec = Vec::with_capacity(batch.len()); + let mut subjects: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + + for MasNewUpstreamOauthLink { + link_id, + user_id, + upstream_provider_id, + subject, + created_at, + } in batch + { + link_ids.push(link_id); + user_ids.push(user_id.get()); + upstream_provider_ids.push(upstream_provider_id); + subjects.push(subject); + created_ats.push(created_at); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__upstream_oauth_links + (upstream_oauth_link_id, user_id, upstream_oauth_provider_id, subject, created_at) + SELECT * FROM UNNEST($1::UUID[], $2::UUID[], $3::UUID[], $4::TEXT[], $5::TIMESTAMP WITH TIME ZONE[]) + "#, + &link_ids[..], + &user_ids[..], + &upstream_provider_ids[..], + &subjects[..], + &created_ats[..], + ).execute(&mut *conn).await.into_database("writing unsupported threepids to MAS")?; + + Ok(()) + } +} + +pub struct MasNewCompatSession { + pub session_id: Uuid, + pub user_id: NonNilUuid, + pub device_id: Option, + pub human_name: Option, + pub created_at: DateTime, + pub is_synapse_admin: bool, + pub last_active_at: Option>, + pub last_active_ip: Option, + pub user_agent: Option, +} + +impl WriteBatch for MasNewCompatSession { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut session_ids: Vec = Vec::with_capacity(batch.len()); + let mut user_ids: Vec = Vec::with_capacity(batch.len()); + let mut device_ids: Vec> = Vec::with_capacity(batch.len()); + let mut human_names: Vec> = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + let mut is_synapse_admins: Vec = Vec::with_capacity(batch.len()); + let mut last_active_ats: Vec>> = 
Vec::with_capacity(batch.len()); + let mut last_active_ips: Vec> = Vec::with_capacity(batch.len()); + let mut user_agents: Vec> = Vec::with_capacity(batch.len()); + + for MasNewCompatSession { + session_id, + user_id, + device_id, + human_name, + created_at, + is_synapse_admin, + last_active_at, + last_active_ip, + user_agent, + } in batch + { + session_ids.push(session_id); + user_ids.push(user_id.get()); + device_ids.push(device_id); + human_names.push(human_name); + created_ats.push(created_at); + is_synapse_admins.push(is_synapse_admin); + last_active_ats.push(last_active_at); + last_active_ips.push(last_active_ip); + user_agents.push(user_agent); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__compat_sessions ( + compat_session_id, user_id, + device_id, human_name, + created_at, is_synapse_admin, + last_active_at, last_active_ip, + user_agent) + SELECT * FROM UNNEST( + $1::UUID[], $2::UUID[], + $3::TEXT[], $4::TEXT[], + $5::TIMESTAMP WITH TIME ZONE[], $6::BOOLEAN[], + $7::TIMESTAMP WITH TIME ZONE[], $8::INET[], + $9::TEXT[]) + "#, + &session_ids[..], + &user_ids[..], + &device_ids[..] as &[Option], + &human_names[..] as &[Option], + &created_ats[..], + &is_synapse_admins[..], + // We need to override the typing for arrays of optionals (sqlx limitation) + &last_active_ats[..] as &[Option>], + &last_active_ips[..] as &[Option], + &user_agents[..] 
as &[Option], + ) + .execute(&mut *conn) + .await + .into_database("writing compat sessions to MAS")?; + + Ok(()) + } +} + +pub struct MasNewCompatAccessToken { + pub token_id: Uuid, + pub session_id: Uuid, + pub access_token: String, + pub created_at: DateTime, + pub expires_at: Option>, +} + +impl WriteBatch for MasNewCompatAccessToken { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut token_ids: Vec = Vec::with_capacity(batch.len()); + let mut session_ids: Vec = Vec::with_capacity(batch.len()); + let mut access_tokens: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + let mut expires_ats: Vec>> = Vec::with_capacity(batch.len()); + + for MasNewCompatAccessToken { + token_id, + session_id, + access_token, + created_at, + expires_at, + } in batch + { + token_ids.push(token_id); + session_ids.push(session_id); + access_tokens.push(access_token); + created_ats.push(created_at); + expires_ats.push(expires_at); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__compat_access_tokens ( + compat_access_token_id, + compat_session_id, + access_token, + created_at, + expires_at) + SELECT * FROM UNNEST( + $1::UUID[], + $2::UUID[], + $3::TEXT[], + $4::TIMESTAMP WITH TIME ZONE[], + $5::TIMESTAMP WITH TIME ZONE[]) + "#, + &token_ids[..], + &session_ids[..], + &access_tokens[..], + &created_ats[..], + // We need to override the typing for arrays of optionals (sqlx limitation) + &expires_ats[..] 
as &[Option>], + ) + .execute(&mut *conn) + .await + .into_database("writing compat access tokens to MAS")?; + + Ok(()) + } +} + +pub struct MasNewCompatRefreshToken { + pub refresh_token_id: Uuid, + pub session_id: Uuid, + pub access_token_id: Uuid, + pub refresh_token: String, + pub created_at: DateTime, +} + +impl WriteBatch for MasNewCompatRefreshToken { + async fn write_batch(conn: &mut PgConnection, batch: Vec) -> Result<(), Error> { + let mut refresh_token_ids: Vec = Vec::with_capacity(batch.len()); + let mut session_ids: Vec = Vec::with_capacity(batch.len()); + let mut access_token_ids: Vec = Vec::with_capacity(batch.len()); + let mut refresh_tokens: Vec = Vec::with_capacity(batch.len()); + let mut created_ats: Vec> = Vec::with_capacity(batch.len()); + + for MasNewCompatRefreshToken { + refresh_token_id, + session_id, + access_token_id, + refresh_token, + created_at, + } in batch + { + refresh_token_ids.push(refresh_token_id); + session_ids.push(session_id); + access_token_ids.push(access_token_id); + refresh_tokens.push(refresh_token); + created_ats.push(created_at); + } + + sqlx::query!( + r#" + INSERT INTO syn2mas__compat_refresh_tokens ( + compat_refresh_token_id, + compat_session_id, + compat_access_token_id, + refresh_token, + created_at) + SELECT * FROM UNNEST( + $1::UUID[], + $2::UUID[], + $3::UUID[], + $4::TEXT[], + $5::TIMESTAMP WITH TIME ZONE[]) + "#, + &refresh_token_ids[..], + &session_ids[..], + &access_token_ids[..], + &refresh_tokens[..], + &created_ats[..], + ) + .execute(&mut *conn) + .await + .into_database("writing compat refresh tokens to MAS")?; + + Ok(()) + } +} + +/// The 'version' of the password hashing scheme used for passwords when they +/// are migrated from Synapse to MAS. +/// This is version 1, as in the previous syn2mas script. +// TODO hardcoding version to `1` may not be correct long-term? +pub const MIGRATED_PASSWORD_VERSION: u16 = 1; + +/// List of all MAS tables that are written to by syn2mas. 
+pub const MAS_TABLES_AFFECTED_BY_MIGRATION: &[&str] = &[ + "users", + "user_passwords", + "user_emails", + "user_unsupported_third_party_ids", + "upstream_oauth_links", + "compat_sessions", + "compat_access_tokens", + "compat_refresh_tokens", +]; + +/// Detect whether a syn2mas migration has started on the given database. +/// +/// Concretly, this checks for the presence of syn2mas restoration tables. +/// +/// Returns `true` if syn2mas has started, or `false` if it hasn't. +/// +/// # Errors +/// +/// Errors are returned under the following circumstances: +/// +/// - If any database error occurs whilst querying the database. +/// - If some, but not all, syn2mas restoration tables are present. (This +/// shouldn't be possible without syn2mas having been sabotaged!) +pub async fn is_syn2mas_in_progress(conn: &mut PgConnection) -> Result { + // Names of tables used for syn2mas resumption + // Must be `String`s, not just `&str`, for the query. + let restore_table_names = vec![ + "syn2mas_restore_constraints".to_owned(), + "syn2mas_restore_indices".to_owned(), + ]; + + let num_resumption_tables = query!( + r#" + SELECT 1 AS _dummy FROM pg_tables WHERE schemaname = current_schema + AND tablename = ANY($1) + "#, + &restore_table_names, + ) + .fetch_all(conn.as_mut()) + .await + .into_database("failed to query count of resumption tables")? + .len(); + + if num_resumption_tables == 0 { + Ok(false) + } else if num_resumption_tables == restore_table_names.len() { + Ok(true) + } else { + Err(Error::inconsistent( + "some, but not all, syn2mas resumption tables were found", + )) + } +} + +impl MasWriter { + /// Creates a new MAS writer. + /// + /// # Errors + /// + /// Errors are returned in the following conditions: + /// + /// - If the database connection experiences an error. 
+ #[tracing::instrument(name = "syn2mas.mas_writer.new", skip_all)] + pub async fn new( + mut conn: LockedMasDatabase, + mut writer_connections: Vec, + dry_run: bool, + ) -> Result { + // Given that we don't have any concurrent transactions here, + // the READ COMMITTED isolation level is sufficient. + query("BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;") + .execute(conn.as_mut()) + .await + .into_database("begin MAS transaction")?; + + let syn2mas_started = is_syn2mas_in_progress(conn.as_mut()).await?; + + let indices_to_restore; + let constraints_to_restore; + + if syn2mas_started { + // We are resuming from a partially-done syn2mas migration + // We should reset the database so that we're starting from scratch. + warn!("Partial syn2mas migration has already been done; resetting."); + for table in MAS_TABLES_AFFECTED_BY_MIGRATION { + query(&format!("TRUNCATE syn2mas__{table};")) + .execute(conn.as_mut()) + .await + .into_database_with(|| format!("failed to truncate table syn2mas__{table}"))?; + } + + indices_to_restore = query_as!( + IndexDescription, + "SELECT table_name, name, definition FROM syn2mas_restore_indices ORDER BY order_key" + ) + .fetch_all(conn.as_mut()) + .await + .into_database("failed to get syn2mas restore data (index descriptions)")?; + constraints_to_restore = query_as!( + ConstraintDescription, + "SELECT table_name, name, definition FROM syn2mas_restore_constraints ORDER BY order_key" + ) + .fetch_all(conn.as_mut()) + .await + .into_database("failed to get syn2mas restore data (constraint descriptions)")?; + } else { + info!("Starting new syn2mas migration"); + + conn.as_mut() + .execute_many(include_str!("syn2mas_temporary_tables.sql")) + // We don't care about any query results + .try_collect::>() + .await + .into_database("could not create temporary tables")?; + + // Pause (temporarily drop) indices and constraints in order to improve + // performance of bulk data loading. 
+ (indices_to_restore, constraints_to_restore) = + Self::pause_indices(conn.as_mut()).await?; + + // Persist these index and constraint definitions. + for IndexDescription { + name, + table_name, + definition, + } in &indices_to_restore + { + query!( + r#" + INSERT INTO syn2mas_restore_indices (name, table_name, definition) + VALUES ($1, $2, $3) + "#, + name, + table_name, + definition + ) + .execute(conn.as_mut()) + .await + .into_database("failed to save restore data (index)")?; + } + for ConstraintDescription { + name, + table_name, + definition, + } in &constraints_to_restore + { + query!( + r#" + INSERT INTO syn2mas_restore_constraints (name, table_name, definition) + VALUES ($1, $2, $3) + "#, + name, + table_name, + definition + ) + .execute(conn.as_mut()) + .await + .into_database("failed to save restore data (index)")?; + } + } + + query("COMMIT;") + .execute(conn.as_mut()) + .await + .into_database("begin MAS transaction")?; + + // Now after all the schema changes have been done, begin writer transactions + for writer_connection in &mut writer_connections { + query("BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;") + .execute(&mut *writer_connection) + .await + .into_database("begin MAS writer transaction")?; + } + + Ok(Self { + conn, + dry_run, + writer_pool: WriterConnectionPool::new(writer_connections), + indices_to_restore, + constraints_to_restore, + write_buffer_finish_checker: FinishChecker::default(), + }) + } + + #[tracing::instrument(skip_all)] + async fn pause_indices( + conn: &mut PgConnection, + ) -> Result<(Vec, Vec), Error> { + let mut indices_to_restore = Vec::new(); + let mut constraints_to_restore = Vec::new(); + + for &unprefixed_table in MAS_TABLES_AFFECTED_BY_MIGRATION { + let table = format!("syn2mas__{unprefixed_table}"); + // First drop incoming foreign key constraints + for constraint in + constraint_pausing::describe_foreign_key_constraints_to_table(&mut *conn, &table) + .await? 
+ { + constraint_pausing::drop_constraint(&mut *conn, &constraint).await?; + constraints_to_restore.push(constraint); + } + // After all incoming foreign key constraints have been removed, + // we can now drop internal constraints. + for constraint in + constraint_pausing::describe_constraints_on_table(&mut *conn, &table).await? + { + constraint_pausing::drop_constraint(&mut *conn, &constraint).await?; + constraints_to_restore.push(constraint); + } + // After all constraints have been removed, we can drop indices. + for index in constraint_pausing::describe_indices_on_table(&mut *conn, &table).await? { + constraint_pausing::drop_index(&mut *conn, &index).await?; + indices_to_restore.push(index); + } + } + + Ok((indices_to_restore, constraints_to_restore)) + } + + async fn restore_indices( + conn: &mut LockedMasDatabase, + indices_to_restore: &[IndexDescription], + constraints_to_restore: &[ConstraintDescription], + progress: &Progress, + ) -> Result<(), Error> { + // First restore all indices. The order is not important as far as I know. + // However the indices are needed before constraints. + for index in indices_to_restore.iter().rev() { + progress.rebuild_index(index.name.clone()); + constraint_pausing::restore_index(conn.as_mut(), index).await?; + } + // Then restore all constraints. + // The order here is the reverse of drop order, since some constraints may rely + // on other constraints to work. + for constraint in constraints_to_restore.iter().rev() { + progress.rebuild_constraint(constraint.name.clone()); + constraint_pausing::restore_constraint(conn.as_mut(), constraint).await?; + } + Ok(()) + } + + /// Finish writing to the MAS database, flushing and committing all changes. + /// It returns the unlocked underlying connection. + /// + /// # Errors + /// + /// Errors are returned in the following conditions: + /// + /// - If the database connection experiences an error. 
+ #[tracing::instrument(skip_all)] + pub async fn finish(mut self, progress: &Progress) -> Result { + self.write_buffer_finish_checker.check_all_finished()?; + + // Commit all writer transactions to the database. + self.writer_pool + .finish() + .await + .map_err(|errors| Error::Multiple(MultipleErrors::from(errors)))?; + + // Now all the data has been migrated, finish off by restoring indices and + // constraints! + query("BEGIN TRANSACTION ISOLATION LEVEL READ COMMITTED;") + .execute(self.conn.as_mut()) + .await + .into_database("begin MAS transaction")?; + + Self::restore_indices( + &mut self.conn, + &self.indices_to_restore, + &self.constraints_to_restore, + progress, + ) + .await?; + + self.conn + .as_mut() + .execute_many(include_str!("syn2mas_revert_temporary_tables.sql")) + // We don't care about any query results + .try_collect::>() + .await + .into_database("could not revert temporary tables")?; + + // If we're in dry-run mode, truncate all the tables we've written to + if self.dry_run { + warn!("Migration ran in dry-run mode, deleting all imported data"); + let tables = MAS_TABLES_AFFECTED_BY_MIGRATION + .iter() + .map(|table| format!("\"{table}\"")) + .collect::>() + .join(", "); + + // Note that we do that with CASCADE, because we do that *after* + // restoring the FK constraints. + // + // The alternative would be to list all the tables we have FK to + // those tables, which would be a hassle, or to do that after + // restoring the constraints, which would mean we wouldn't validate + // that we've done valid FKs in dry-run mode. 
+ query(&format!("TRUNCATE TABLE {tables} CASCADE;")) + .execute(self.conn.as_mut()) + .await + .into_database_with(|| "failed to truncate all tables")?; + } + + query("COMMIT;") + .execute(self.conn.as_mut()) + .await + .into_database("ending MAS transaction")?; + + let conn = self + .conn + .unlock() + .await + .into_database("could not unlock MAS database")?; + + Ok(conn) + } +} + +// How many entries to buffer at once, before writing a batch of rows to the +// database. +const WRITE_BUFFER_BATCH_SIZE: usize = 4096; + +/// A buffer for writing rows to the MAS database. +/// Generic over the type of rows. +pub struct MasWriteBuffer { + rows: Vec, + finish_checker_handle: FinishCheckerHandle, +} + +impl MasWriteBuffer +where + T: WriteBatch, +{ + pub fn new(writer: &MasWriter) -> Self { + MasWriteBuffer { + rows: Vec::with_capacity(WRITE_BUFFER_BATCH_SIZE), + finish_checker_handle: writer.write_buffer_finish_checker.handle(), + } + } + + pub async fn finish(mut self, writer: &mut MasWriter) -> Result<(), Error> { + self.flush(writer).await?; + self.finish_checker_handle.declare_finished(); + Ok(()) + } + + pub async fn flush(&mut self, writer: &mut MasWriter) -> Result<(), Error> { + if self.rows.is_empty() { + return Ok(()); + } + let rows = std::mem::take(&mut self.rows); + self.rows.reserve_exact(WRITE_BUFFER_BATCH_SIZE); + writer + .writer_pool + .spawn_with_connection(move |conn| T::write_batch(conn, rows).boxed()) + .boxed() + .await?; + Ok(()) + } + + pub async fn write(&mut self, writer: &mut MasWriter, row: T) -> Result<(), Error> { + self.rows.push(row); + if self.rows.len() >= WRITE_BUFFER_BATCH_SIZE { + self.flush(writer).await?; + } + Ok(()) + } +} + +#[cfg(test)] +mod test { + use std::collections::{BTreeMap, BTreeSet}; + + use chrono::DateTime; + use futures_util::TryStreamExt; + use serde::Serialize; + use sqlx::{Column, PgConnection, PgPool, Row}; + use uuid::{NonNilUuid, Uuid}; + + use crate::{ + LockedMasDatabase, MasWriter, Progress, + 
mas_writer::{ + MasNewCompatAccessToken, MasNewCompatRefreshToken, MasNewCompatSession, + MasNewEmailThreepid, MasNewUnsupportedThreepid, MasNewUpstreamOauthLink, MasNewUser, + MasNewUserPassword, MasWriteBuffer, + }, + }; + + /// A snapshot of a whole database + #[derive(Default, Serialize)] + #[serde(transparent)] + struct DatabaseSnapshot { + tables: BTreeMap, + } + + #[derive(Serialize)] + #[serde(transparent)] + struct TableSnapshot { + rows: BTreeSet, + } + + #[derive(PartialEq, Eq, PartialOrd, Ord, Serialize)] + #[serde(transparent)] + struct RowSnapshot { + columns_to_values: BTreeMap>, + } + + const SKIPPED_TABLES: &[&str] = &["_sqlx_migrations"]; + + /// Produces a serialisable snapshot of a database, usable for snapshot + /// testing + /// + /// For brevity, empty tables, as well as [`SKIPPED_TABLES`], will not be + /// included in the snapshot. + async fn snapshot_database(conn: &mut PgConnection) -> DatabaseSnapshot { + let mut out = DatabaseSnapshot::default(); + let table_names: Vec = sqlx::query_scalar( + "SELECT table_name FROM information_schema.tables WHERE table_schema = current_schema();", + ) + .fetch_all(&mut *conn) + .await + .unwrap(); + + for table_name in table_names { + if SKIPPED_TABLES.contains(&table_name.as_str()) { + continue; + } + + let column_names: Vec = sqlx::query_scalar( + "SELECT column_name FROM information_schema.columns WHERE table_name = $1 AND table_schema = current_schema();" + ).bind(&table_name).fetch_all(&mut *conn).await.expect("failed to get column names for table for snapshotting"); + + let column_name_list = column_names + .iter() + // stringify all the values for simplicity + .map(|column_name| format!("{column_name}::TEXT AS \"{column_name}\"")) + .collect::>() + .join(", "); + + let table_rows = sqlx::query(&format!("SELECT {column_name_list} FROM {table_name};")) + .fetch(&mut *conn) + .map_ok(|row| { + let mut columns_to_values = BTreeMap::new(); + for (idx, column) in row.columns().iter().enumerate() { + 
columns_to_values.insert(column.name().to_owned(), row.get(idx)); + } + RowSnapshot { columns_to_values } + }) + .try_collect::>() + .await + .expect("failed to fetch rows from table for snapshotting"); + + if !table_rows.is_empty() { + out.tables + .insert(table_name, TableSnapshot { rows: table_rows }); + } + } + + out + } + + /// Make a snapshot assertion against the database. + macro_rules! assert_db_snapshot { + ($db: expr) => { + let db_snapshot = snapshot_database($db).await; + ::insta::assert_yaml_snapshot!(db_snapshot); + }; + } + + /// Runs some code with a `MasWriter`. + /// + /// The callback is responsible for `finish`ing the `MasWriter`. + async fn make_mas_writer(pool: &PgPool) -> MasWriter { + let main_conn = pool.acquire().await.unwrap().detach(); + let mut writer_conns = Vec::new(); + for _ in 0..2 { + writer_conns.push( + pool.acquire() + .await + .expect("failed to acquire MasWriter writer connection") + .detach(), + ); + } + let locked_main_conn = LockedMasDatabase::try_new(main_conn) + .await + .expect("failed to lock MAS database") + .expect_left("MAS database is already locked"); + MasWriter::new(locked_main_conn, writer_conns, false) + .await + .expect("failed to construct MasWriter") + } + + /// Tests writing a single user, without a password. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + let mut buffer = MasWriteBuffer::new(&writer); + + buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + buffer + .finish(&mut writer) + .await + .expect("failed to finish MasWriter"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a password. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_password(pool: PgPool) { + const USER_ID: NonNilUuid = NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(); + + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut password_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: USER_ID, + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + password_buffer + .write( + &mut writer, + MasNewUserPassword { + user_password_id: Uuid::from_u128(42u128), + user_id: USER_ID, + hashed_password: "$bcrypt$aaaaaaaaaaa".to_owned(), + created_at: DateTime::default(), + }, + ) + .await + .expect("failed to write password"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish MasWriteBuffer"); + password_buffer + .finish(&mut writer) + .await + .expect("failed to finish MasWriteBuffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed 
to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with an e-mail address associated. + #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_email(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut email_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + email_buffer + .write( + &mut writer, + MasNewEmailThreepid { + user_email_id: Uuid::from_u128(2u128), + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + email: "alice@example.org".to_owned(), + created_at: DateTime::default(), + }, + ) + .await + .expect("failed to write e-mail"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + email_buffer + .finish(&mut writer) + .await + .expect("failed to finish email buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a unsupported third-party ID + /// associated. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_unsupported_threepid(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut threepid_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + threepid_buffer + .write( + &mut writer, + MasNewUnsupportedThreepid { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + medium: "msisdn".to_owned(), + address: "441189998819991197253".to_owned(), + created_at: DateTime::default(), + }, + ) + .await + .expect("failed to write phone number (unsupported threepid)"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + threepid_buffer + .finish(&mut writer) + .await + .expect("failed to finish threepid buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a link to an upstream provider. + /// There needs to be an upstream provider in the database already — in the + /// real migration, this is done by running a provider sync first. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR", fixtures("upstream_provider"))] + async fn test_write_user_with_upstream_provider_link(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut link_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + link_buffer + .write( + &mut writer, + MasNewUpstreamOauthLink { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + link_id: Uuid::from_u128(3u128), + upstream_provider_id: Uuid::from_u128(4u128), + subject: "12345.67890".to_owned(), + created_at: DateTime::default(), + }, + ) + .await + .expect("failed to write link"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + link_buffer + .finish(&mut writer) + .await + .expect("failed to finish link buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a device (compat session). 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_device(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut session_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + session_buffer + .write( + &mut writer, + MasNewCompatSession { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + session_id: Uuid::from_u128(5u128), + created_at: DateTime::default(), + device_id: Some("ADEVICE".to_owned()), + human_name: Some("alice's pinephone".to_owned()), + is_synapse_admin: true, + last_active_at: Some(DateTime::default()), + last_active_ip: Some("203.0.113.1".parse().unwrap()), + user_agent: Some("Browser/5.0".to_owned()), + }, + ) + .await + .expect("failed to write compat session"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + session_buffer + .finish(&mut writer) + .await + .expect("failed to finish session buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a device and an access token. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_access_token(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut session_buffer = MasWriteBuffer::new(&writer); + let mut token_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + session_buffer + .write( + &mut writer, + MasNewCompatSession { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + session_id: Uuid::from_u128(5u128), + created_at: DateTime::default(), + device_id: Some("ADEVICE".to_owned()), + human_name: None, + is_synapse_admin: false, + last_active_at: None, + last_active_ip: None, + user_agent: None, + }, + ) + .await + .expect("failed to write compat session"); + + token_buffer + .write( + &mut writer, + MasNewCompatAccessToken { + token_id: Uuid::from_u128(6u128), + session_id: Uuid::from_u128(5u128), + access_token: "syt_zxcvzxcvzxcvzxcv_zxcv".to_owned(), + created_at: DateTime::default(), + expires_at: None, + }, + ) + .await + .expect("failed to write access token"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + session_buffer + .finish(&mut writer) + .await + .expect("failed to finish session buffer"); + token_buffer + .finish(&mut writer) + .await + .expect("failed to finish token buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } + + /// Tests writing a single user, with a device, an access token and a + /// refresh token. 
+ #[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")] + async fn test_write_user_with_refresh_token(pool: PgPool) { + let mut writer = make_mas_writer(&pool).await; + + let mut user_buffer = MasWriteBuffer::new(&writer); + let mut session_buffer = MasWriteBuffer::new(&writer); + let mut token_buffer = MasWriteBuffer::new(&writer); + let mut refresh_token_buffer = MasWriteBuffer::new(&writer); + + user_buffer + .write( + &mut writer, + MasNewUser { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + username: "alice".to_owned(), + created_at: DateTime::default(), + locked_at: None, + deactivated_at: None, + can_request_admin: false, + is_guest: false, + }, + ) + .await + .expect("failed to write user"); + + session_buffer + .write( + &mut writer, + MasNewCompatSession { + user_id: NonNilUuid::new(Uuid::from_u128(1u128)).unwrap(), + session_id: Uuid::from_u128(5u128), + created_at: DateTime::default(), + device_id: Some("ADEVICE".to_owned()), + human_name: None, + is_synapse_admin: false, + last_active_at: None, + last_active_ip: None, + user_agent: None, + }, + ) + .await + .expect("failed to write compat session"); + + token_buffer + .write( + &mut writer, + MasNewCompatAccessToken { + token_id: Uuid::from_u128(6u128), + session_id: Uuid::from_u128(5u128), + access_token: "syt_zxcvzxcvzxcvzxcv_zxcv".to_owned(), + created_at: DateTime::default(), + expires_at: None, + }, + ) + .await + .expect("failed to write access token"); + + refresh_token_buffer + .write( + &mut writer, + MasNewCompatRefreshToken { + refresh_token_id: Uuid::from_u128(7u128), + session_id: Uuid::from_u128(5u128), + access_token_id: Uuid::from_u128(6u128), + refresh_token: "syr_zxcvzxcvzxcvzxcv_zxcv".to_owned(), + created_at: DateTime::default(), + }, + ) + .await + .expect("failed to write refresh token"); + + user_buffer + .finish(&mut writer) + .await + .expect("failed to finish user buffer"); + session_buffer + .finish(&mut writer) + .await + .expect("failed to finish session 
buffer"); + token_buffer + .finish(&mut writer) + .await + .expect("failed to finish token buffer"); + refresh_token_buffer + .finish(&mut writer) + .await + .expect("failed to finish refresh token buffer"); + + let mut conn = writer + .finish(&Progress::default()) + .await + .expect("failed to finish MasWriter"); + + assert_db_snapshot!(&mut conn); + } +} diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user.snap new file mode 100644 index 00000000..76628de2 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user.snap @@ -0,0 +1,12 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_access_token.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_access_token.snap new file mode 100644 index 00000000..5f947e45 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_access_token.snap @@ -0,0 +1,30 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +compat_access_tokens: + - access_token: syt_zxcvzxcvzxcvzxcv_zxcv + compat_access_token_id: 00000000-0000-0000-0000-000000000006 + compat_session_id: 00000000-0000-0000-0000-000000000005 + created_at: "1970-01-01 00:00:00+00" + expires_at: ~ +compat_sessions: + - compat_session_id: 00000000-0000-0000-0000-000000000005 + created_at: "1970-01-01 
00:00:00+00" + device_id: ADEVICE + finished_at: ~ + human_name: ~ + is_synapse_admin: "false" + last_active_at: ~ + last_active_ip: ~ + user_agent: ~ + user_id: 00000000-0000-0000-0000-000000000001 + user_session_id: ~ +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_device.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_device.snap new file mode 100644 index 00000000..f21ba378 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_device.snap @@ -0,0 +1,24 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +compat_sessions: + - compat_session_id: 00000000-0000-0000-0000-000000000005 + created_at: "1970-01-01 00:00:00+00" + device_id: ADEVICE + finished_at: ~ + human_name: "alice's pinephone" + is_synapse_admin: "true" + last_active_at: "1970-01-01 00:00:00+00" + last_active_ip: 203.0.113.1/32 + user_agent: Browser/5.0 + user_id: 00000000-0000-0000-0000-000000000001 + user_session_id: ~ +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_email.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_email.snap new file mode 100644 index 00000000..51da8006 --- /dev/null +++ 
b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_email.snap @@ -0,0 +1,17 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +user_emails: + - created_at: "1970-01-01 00:00:00+00" + email: alice@example.org + user_email_id: 00000000-0000-0000-0000-000000000002 + user_id: 00000000-0000-0000-0000-000000000001 +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_password.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_password.snap new file mode 100644 index 00000000..1966dd5c --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_password.snap @@ -0,0 +1,19 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +user_passwords: + - created_at: "1970-01-01 00:00:00+00" + hashed_password: $bcrypt$aaaaaaaaaaa + upgraded_from_id: ~ + user_id: 00000000-0000-0000-0000-000000000001 + user_password_id: 00000000-0000-0000-0000-00000000002a + version: "1" +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_refresh_token.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_refresh_token.snap new file mode 100644 index 00000000..89377040 --- /dev/null +++ 
b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_refresh_token.snap @@ -0,0 +1,37 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +compat_access_tokens: + - access_token: syt_zxcvzxcvzxcvzxcv_zxcv + compat_access_token_id: 00000000-0000-0000-0000-000000000006 + compat_session_id: 00000000-0000-0000-0000-000000000005 + created_at: "1970-01-01 00:00:00+00" + expires_at: ~ +compat_refresh_tokens: + - compat_access_token_id: 00000000-0000-0000-0000-000000000006 + compat_refresh_token_id: 00000000-0000-0000-0000-000000000007 + compat_session_id: 00000000-0000-0000-0000-000000000005 + consumed_at: ~ + created_at: "1970-01-01 00:00:00+00" + refresh_token: syr_zxcvzxcvzxcvzxcv_zxcv +compat_sessions: + - compat_session_id: 00000000-0000-0000-0000-000000000005 + created_at: "1970-01-01 00:00:00+00" + device_id: ADEVICE + finished_at: ~ + human_name: ~ + is_synapse_admin: "false" + last_active_at: ~ + last_active_ip: ~ + user_agent: ~ + user_id: 00000000-0000-0000-0000-000000000001 + user_session_id: ~ +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_unsupported_threepid.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_unsupported_threepid.snap new file mode 100644 index 00000000..e81697c6 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_unsupported_threepid.snap @@ -0,0 +1,17 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +user_unsupported_third_party_ids: + - address: "441189998819991197253" + created_at: 
"1970-01-01 00:00:00+00" + medium: msisdn + user_id: 00000000-0000-0000-0000-000000000001 +users: + - can_request_admin: "false" + created_at: "1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap new file mode 100644 index 00000000..7b9173eb --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap @@ -0,0 +1,46 @@ +--- +source: crates/syn2mas/src/mas_writer/mod.rs +expression: db_snapshot +--- +upstream_oauth_links: + - created_at: "1970-01-01 00:00:00+00" + human_account_name: ~ + subject: "12345.67890" + upstream_oauth_link_id: 00000000-0000-0000-0000-000000000003 + upstream_oauth_provider_id: 00000000-0000-0000-0000-000000000004 + user_id: 00000000-0000-0000-0000-000000000001 +upstream_oauth_providers: + - additional_parameters: ~ + authorization_endpoint_override: ~ + brand_name: ~ + claims_imports: "{}" + client_id: someClientId + created_at: "2011-12-13 14:15:16+00" + disabled_at: ~ + discovery_mode: oidc + encrypted_client_secret: ~ + fetch_userinfo: "false" + forward_login_hint: "false" + human_name: ~ + id_token_signed_response_alg: RS256 + issuer: ~ + jwks_uri_override: ~ + on_backchannel_logout: do_nothing + pkce_mode: auto + response_mode: query + scope: openid + token_endpoint_auth_method: client_secret_basic + token_endpoint_override: ~ + token_endpoint_signing_alg: ~ + ui_order: "0" + upstream_oauth_provider_id: 00000000-0000-0000-0000-000000000004 + userinfo_endpoint_override: ~ + userinfo_signed_response_alg: ~ +users: + - can_request_admin: "false" + created_at: 
"1970-01-01 00:00:00+00" + deactivated_at: ~ + is_guest: "false" + locked_at: ~ + user_id: 00000000-0000-0000-0000-000000000001 + username: alice diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_revert_temporary_tables.sql b/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_revert_temporary_tables.sql new file mode 100644 index 00000000..73f4cfe5 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_revert_temporary_tables.sql @@ -0,0 +1,18 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +-- This script should revert what `syn2mas_temporary_tables.sql` does. + +DROP TABLE syn2mas_restore_constraints; +DROP TABLE syn2mas_restore_indices; + +ALTER TABLE syn2mas__users RENAME TO users; +ALTER TABLE syn2mas__user_passwords RENAME TO user_passwords; +ALTER TABLE syn2mas__user_emails RENAME TO user_emails; +ALTER TABLE syn2mas__user_unsupported_third_party_ids RENAME TO user_unsupported_third_party_ids; +ALTER TABLE syn2mas__upstream_oauth_links RENAME TO upstream_oauth_links; +ALTER TABLE syn2mas__compat_sessions RENAME TO compat_sessions; +ALTER TABLE syn2mas__compat_access_tokens RENAME TO compat_access_tokens; +ALTER TABLE syn2mas__compat_refresh_tokens RENAME TO compat_refresh_tokens; diff --git a/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_temporary_tables.sql b/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_temporary_tables.sql new file mode 100644 index 00000000..873ceeb7 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/mas_writer/syn2mas_temporary_tables.sql @@ -0,0 +1,47 @@ +-- Copyright 2024 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + + +-- # syn2mas Temporary Tables +-- This file takes a MAS database and: +-- +-- 1. 
creates temporary tables used by syn2mas for storing restore data +-- 2. renames important tables with the `syn2mas__` prefix, to prevent +-- running MAS instances from having any opportunity to see or modify +-- the partial data in the database, especially whilst it is not protected +-- by constraints. +-- +-- All changes in this file must be reverted by `syn2mas_revert_temporary_tables.sql` +-- in the same directory. + +-- corresponds to `ConstraintDescription` +CREATE TABLE syn2mas_restore_constraints ( + -- synthetic auto-incrementing ID so we can load these in order + order_key SERIAL NOT NULL PRIMARY KEY, + + table_name TEXT NOT NULL, + name TEXT NOT NULL, + definition TEXT NOT NULL +); + +-- corresponds to `IndexDescription` +CREATE TABLE syn2mas_restore_indices ( + -- synthetic auto-incrementing ID so we can load these in order + order_key SERIAL NOT NULL PRIMARY KEY, + + table_name TEXT NOT NULL, + name TEXT NOT NULL, + definition TEXT NOT NULL +); + +-- Now we rename all tables that we touch during the migration. +ALTER TABLE users RENAME TO syn2mas__users; +ALTER TABLE user_passwords RENAME TO syn2mas__user_passwords; +ALTER TABLE user_emails RENAME TO syn2mas__user_emails; +ALTER TABLE user_unsupported_third_party_ids RENAME TO syn2mas__user_unsupported_third_party_ids; +ALTER TABLE upstream_oauth_links RENAME TO syn2mas__upstream_oauth_links; +ALTER TABLE compat_sessions RENAME TO syn2mas__compat_sessions; +ALTER TABLE compat_access_tokens RENAME TO syn2mas__compat_access_tokens; +ALTER TABLE compat_refresh_tokens RENAME TO syn2mas__compat_refresh_tokens; diff --git a/matrix-authentication-service/crates/syn2mas/src/migration.rs b/matrix-authentication-service/crates/syn2mas/src/migration.rs new file mode 100644 index 00000000..d71e8cd0 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/migration.rs @@ -0,0 +1,1050 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! # Migration +//! +//! This module provides the high-level logic for performing the Synapse-to-MAS +//! database migration. +//! +//! This module does not implement any of the safety checks that should be run +//! *before* the migration. + +use std::time::Instant; + +use chrono::{DateTime, Utc}; +use compact_str::CompactString; +use futures_util::{SinkExt, StreamExt as _, TryFutureExt, TryStreamExt as _}; +use mas_data_model::Clock; +use rand::{RngCore, SeedableRng}; +use thiserror::Error; +use thiserror_ext::ContextInto; +use tokio_util::sync::PollSender; +use tracing::{Instrument as _, Level, info}; +use ulid::Ulid; +use uuid::{NonNilUuid, Uuid}; + +use crate::{ + HashMap, ProgressCounter, RandomState, SynapseReader, + mas_writer::{ + self, MasNewCompatAccessToken, MasNewCompatRefreshToken, MasNewCompatSession, + MasNewEmailThreepid, MasNewUnsupportedThreepid, MasNewUpstreamOauthLink, MasNewUser, + MasNewUserPassword, MasWriteBuffer, MasWriter, + }, + progress::{EntityType, Progress}, + synapse_reader::{ + self, ExtractLocalpartError, FullUserId, SynapseAccessToken, SynapseDevice, + SynapseExternalId, SynapseRefreshableTokenPair, SynapseThreepid, SynapseUser, + }, +}; + +#[derive(Debug, Error, ContextInto)] +pub enum Error { + #[error("error when reading synapse DB ({context}): {source}")] + Synapse { + source: synapse_reader::Error, + context: String, + }, + #[error("error when writing to MAS DB ({context}): {source}")] + Mas { + source: mas_writer::Error, + context: String, + }, + #[error("failed to extract localpart of {user:?}: {source}")] + ExtractLocalpart { + source: ExtractLocalpartError, + user: FullUserId, + }, + #[error("channel closed")] + ChannelClosed, + + #[error("task failed ({context}): {source}")] + Join { + source: tokio::task::JoinError, + context: String, + }, + + #[error("user {user} was 
not found for migration but a row in {table} was found for them")] + MissingUserFromDependentTable { table: String, user: FullUserId }, + #[error( + "missing a mapping for the auth provider with ID {synapse_id:?} (used by {user} and maybe other users)" + )] + MissingAuthProviderMapping { + /// `auth_provider` ID of the provider in Synapse, for which we have no + /// mapping + synapse_id: String, + /// a user that is using this auth provider + user: FullUserId, + }, +} + +bitflags::bitflags! { + #[derive(Debug, Clone, Copy)] + struct UserFlags: u8 { + const IS_SYNAPSE_ADMIN = 0b0000_0001; + const IS_DEACTIVATED = 0b0000_0010; + const IS_GUEST = 0b0000_0100; + const IS_APPSERVICE = 0b0000_1000; + } +} + +impl UserFlags { + const fn is_deactivated(self) -> bool { + self.contains(UserFlags::IS_DEACTIVATED) + } + + const fn is_guest(self) -> bool { + self.contains(UserFlags::IS_GUEST) + } + + const fn is_synapse_admin(self) -> bool { + self.contains(UserFlags::IS_SYNAPSE_ADMIN) + } + + const fn is_appservice(self) -> bool { + self.contains(UserFlags::IS_APPSERVICE) + } +} + +#[derive(Debug, Clone, Copy)] +struct UserInfo { + mas_user_id: Option, + flags: UserFlags, +} + +struct MigrationState { + /// The server name we're migrating from + server_name: String, + + /// Lookup table from user localpart to that user's infos + users: HashMap, + + /// Mapping of MAS user ID + device ID to a MAS compat session ID. + devices_to_compat_sessions: HashMap<(NonNilUuid, CompactString), Uuid>, + + /// A mapping of Synapse external ID providers to MAS upstream OAuth 2.0 + /// provider ID + provider_id_mapping: std::collections::HashMap, +} + +/// Performs a migration from Synapse's database to MAS' database. +/// +/// # Panics +/// +/// - If there are more than `usize::MAX` users +/// +/// # Errors +/// +/// Errors are returned under the following circumstances: +/// +/// - An underlying database access error, either to MAS or to Synapse. +/// - Invalid data in the Synapse database. 
+#[expect(clippy::implicit_hasher)] +#[allow(clippy::too_many_arguments)] +pub async fn migrate( + mut synapse: SynapseReader<'_>, + mas: MasWriter, + server_name: String, + clock: &dyn Clock, + rng: &mut impl RngCore, + provider_id_mapping: std::collections::HashMap, + progress: &Progress, + ignore_missing_auth_providers: bool, +) -> Result<(), Error> { + let counts = synapse.count_rows().await.into_synapse("counting users")?; + + let state = MigrationState { + server_name, + // We oversize the hashmaps, as the estimates are innaccurate, and we would like to avoid + // reallocations. + users: HashMap::with_capacity_and_hasher(counts.users * 9 / 8, RandomState::default()), + devices_to_compat_sessions: HashMap::with_capacity_and_hasher( + counts.devices * 9 / 8, + RandomState::default(), + ), + provider_id_mapping, + }; + + let progress_counter = progress.migrating_data(EntityType::Users, counts.users); + let (mas, state) = migrate_users(&mut synapse, mas, state, rng, progress_counter).await?; + + let progress_counter = progress.migrating_data(EntityType::ThreePids, counts.threepids); + let (mas, state) = migrate_threepids(&mut synapse, mas, rng, state, progress_counter).await?; + + let progress_counter = progress.migrating_data(EntityType::ExternalIds, counts.external_ids); + let (mas, state) = migrate_external_ids( + &mut synapse, + mas, + rng, + state, + progress_counter, + ignore_missing_auth_providers, + ) + .await?; + + let progress_counter = progress.migrating_data( + EntityType::NonRefreshableAccessTokens, + counts.access_tokens - counts.refresh_tokens, + ); + let (mas, state) = + migrate_unrefreshable_access_tokens(&mut synapse, mas, clock, rng, state, progress_counter) + .await?; + + let progress_counter = + progress.migrating_data(EntityType::RefreshableTokens, counts.refresh_tokens); + let (mas, state) = + migrate_refreshable_token_pairs(&mut synapse, mas, clock, rng, state, progress_counter) + .await?; + + let progress_counter = 
progress.migrating_data(EntityType::Devices, counts.devices); + let (mas, _state) = migrate_devices(&mut synapse, mas, rng, state, progress_counter).await?; + + synapse + .finish() + .await + .into_synapse("failed to close Synapse reader")?; + + mas.finish(progress) + .await + .into_mas("failed to finalise MAS database")?; + + Ok(()) +} + +#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_users( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + mut state: MigrationState, + rng: &mut impl RngCore, + progress_counter: ProgressCounter, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel::(100 * 1024); + + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let task = tokio::spawn( + async move { + let mut user_buffer = MasWriteBuffer::new(&mas); + let mut password_buffer = MasWriteBuffer::new(&mas); + + while let Some(user) = rx.recv().await { + // Handling an edge case: some AS users may have invalid localparts containing + // extra `:` characters. These users are ignored and a warning is logged. 
+ if user.appservice_id.is_some() + && user + .name + .0 + .strip_suffix(&format!(":{}", state.server_name)) + .is_some_and(|localpart| localpart.contains(':')) + { + tracing::warn!("AS user {} has invalid localpart, ignoring!", user.name.0); + continue; + } + + let (mas_user, mas_password_opt) = + transform_user(&user, &state.server_name, &mut rng)?; + + let mut flags = UserFlags::empty(); + if bool::from(user.admin) { + flags |= UserFlags::IS_SYNAPSE_ADMIN; + } + if bool::from(user.deactivated) { + flags |= UserFlags::IS_DEACTIVATED; + } + if bool::from(user.is_guest) { + flags |= UserFlags::IS_GUEST; + } + if user.appservice_id.is_some() { + flags |= UserFlags::IS_APPSERVICE; + + progress_counter.increment_skipped(); + + // Special case for appservice users: we don't insert them into the database + // We just record the user's information in the state and continue + state.users.insert( + CompactString::new(&mas_user.username), + UserInfo { + mas_user_id: None, + flags, + }, + ); + continue; + } + + state.users.insert( + CompactString::new(&mas_user.username), + UserInfo { + mas_user_id: Some(mas_user.user_id), + flags, + }, + ); + + user_buffer + .write(&mut mas, mas_user) + .await + .into_mas("writing user")?; + + if let Some(mas_password) = mas_password_opt { + password_buffer + .write(&mut mas, mas_password) + .await + .into_mas("writing password")?; + } + + progress_counter.increment_migrated(); + } + + user_buffer + .finish(&mut mas) + .await + .into_mas("writing users")?; + password_buffer + .finish(&mut mas) + .await + .into_mas("writing passwords")?; + + Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_users() + .map_err(|e| e.into_synapse("reading users")) + .forward(PollSender::new(tx).sink_map_err(|_| Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + 
.await; + + let (mas, state) = task.await.into_join("user write task")??; + + res?; + + info!( + "{} users migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_threepids( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + rng: &mut impl RngCore, + state: MigrationState, + progress_counter: ProgressCounter, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel::(100 * 1024); + + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let task = tokio::spawn( + async move { + let mut email_buffer = MasWriteBuffer::new(&mas); + let mut unsupported_buffer = MasWriteBuffer::new(&mas); + + while let Some(threepid) = rx.recv().await { + let SynapseThreepid { + user_id: synapse_user_id, + medium, + address, + added_at, + } = threepid; + let created_at: DateTime = added_at.into(); + + let username = synapse_user_id + .extract_localpart(&state.server_name) + .into_extract_localpart(synapse_user_id.clone())? 
+ .to_owned(); + let Some(user_infos) = state.users.get(username.as_str()).copied() else { + return Err(Error::MissingUserFromDependentTable { + table: "user_threepids".to_owned(), + user: synapse_user_id, + }); + }; + + let Some(mas_user_id) = user_infos.mas_user_id else { + progress_counter.increment_skipped(); + continue; + }; + + if medium == "email" { + email_buffer + .write( + &mut mas, + MasNewEmailThreepid { + user_id: mas_user_id, + user_email_id: Uuid::from(Ulid::from_datetime_with_source( + created_at.into(), + &mut rng, + )), + email: address, + created_at, + }, + ) + .await + .into_mas("writing email")?; + } else { + unsupported_buffer + .write( + &mut mas, + MasNewUnsupportedThreepid { + user_id: mas_user_id, + medium, + address, + created_at, + }, + ) + .await + .into_mas("writing unsupported threepid")?; + } + + progress_counter.increment_migrated(); + } + + email_buffer + .finish(&mut mas) + .await + .into_mas("writing email threepids")?; + unsupported_buffer + .finish(&mut mas) + .await + .into_mas("writing unsupported threepids")?; + + Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_threepids() + .map_err(|e| e.into_synapse("reading threepids")) + .forward(PollSender::new(tx).sink_map_err(|_| Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + .await; + + let (mas, state) = task.await.into_join("threepid write task")??; + + res?; + + info!( + "{} third-party IDs migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_external_ids( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + rng: &mut impl RngCore, + state: MigrationState, + 
progress_counter: ProgressCounter, + ignore_missing_auth_providers: bool, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel::(100 * 1024); + + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let task = tokio::spawn( + async move { + let mut write_buffer = MasWriteBuffer::new(&mas); + + while let Some(extid) = rx.recv().await { + let SynapseExternalId { + user_id: synapse_user_id, + auth_provider, + external_id: subject, + } = extid; + let username = synapse_user_id + .extract_localpart(&state.server_name) + .into_extract_localpart(synapse_user_id.clone())? + .to_owned(); + let Some(user_infos) = state.users.get(username.as_str()).copied() else { + return Err(Error::MissingUserFromDependentTable { + table: "user_external_ids".to_owned(), + user: synapse_user_id, + }); + }; + + let Some(mas_user_id) = user_infos.mas_user_id else { + progress_counter.increment_skipped(); + continue; + }; + + let Some(&upstream_provider_id) = state.provider_id_mapping.get(&auth_provider) + else { + if ignore_missing_auth_providers { + progress_counter.increment_skipped(); + continue; + } + return Err(Error::MissingAuthProviderMapping { + synapse_id: auth_provider, + user: synapse_user_id, + }); + }; + + // To save having to store user creation times, extract it from the ULID + // This gives millisecond precision — good enough. 
+ let user_created_ts = Ulid::from(mas_user_id.get()).datetime(); + + let link_id: Uuid = + Ulid::from_datetime_with_source(user_created_ts, &mut rng).into(); + + write_buffer + .write( + &mut mas, + MasNewUpstreamOauthLink { + link_id, + user_id: mas_user_id, + upstream_provider_id, + subject, + created_at: user_created_ts.into(), + }, + ) + .await + .into_mas("failed to write upstream link")?; + + progress_counter.increment_migrated(); + } + + write_buffer + .finish(&mut mas) + .await + .into_mas("writing upstream links")?; + + Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_user_external_ids() + .map_err(|e| e.into_synapse("reading external ID")) + .forward(PollSender::new(tx).sink_map_err(|_| Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + .await; + + let (mas, state) = task.await.into_join("external IDs write task")??; + + res?; + + info!( + "{} upstream links (external IDs) migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +/// Migrate devices from Synapse to MAS (as compat sessions). +/// +/// In order to get the right session creation timestamps, the access tokens +/// must counterintuitively be migrated first, with the ULIDs passed in as +/// `devices`. +/// +/// This is because only access tokens store a timestamp that in any way +/// resembles a creation timestamp. 
+#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_devices( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + rng: &mut impl RngCore, + mut state: MigrationState, + progress_counter: ProgressCounter, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel(100 * 1024); + + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let task = tokio::spawn( + async move { + let mut write_buffer = MasWriteBuffer::new(&mas); + + while let Some(device) = rx.recv().await { + let SynapseDevice { + user_id: synapse_user_id, + device_id, + display_name, + last_seen, + ip, + user_agent, + } = device; + let username = synapse_user_id + .extract_localpart(&state.server_name) + .into_extract_localpart(synapse_user_id.clone())? + .to_owned(); + let Some(user_infos) = state.users.get(username.as_str()).copied() else { + return Err(Error::MissingUserFromDependentTable { + table: "devices".to_owned(), + user: synapse_user_id, + }); + }; + + let Some(mas_user_id) = user_infos.mas_user_id else { + progress_counter.increment_skipped(); + continue; + }; + + if user_infos.flags.is_deactivated() + || user_infos.flags.is_guest() + || user_infos.flags.is_appservice() + { + continue; + } + + let session_id = *state + .devices_to_compat_sessions + .entry((mas_user_id, CompactString::new(&device_id))) + .or_insert_with(|| + // We don't have a creation time for this device (as it has no access token), + // so use now as a least-evil fallback. + Ulid::with_source(&mut rng).into()); + let created_at = Ulid::from(session_id).datetime().into(); + + // As we're using a real IP type in the MAS database, it is possible + // that we encounter invalid IP addresses in the Synapse database. 
+ // In that case, we should ignore them, but still log a warning. + // One special case: Synapse will record '-' as IP in some cases, we don't want + // to log about those + let last_active_ip = ip.filter(|ip| ip != "-").and_then(|ip| { + ip.parse() + .map_err(|e| { + tracing::warn!( + error = &e as &dyn std::error::Error, + mxid = %synapse_user_id, + %device_id, + %ip, + "Failed to parse device IP, ignoring" + ); + }) + .ok() + }); + + write_buffer + .write( + &mut mas, + MasNewCompatSession { + session_id, + user_id: mas_user_id, + device_id: Some(device_id), + human_name: display_name, + created_at, + is_synapse_admin: user_infos.flags.is_synapse_admin(), + last_active_at: last_seen.map(DateTime::from), + last_active_ip, + user_agent, + }, + ) + .await + .into_mas("writing compat sessions")?; + + progress_counter.increment_migrated(); + } + + write_buffer + .finish(&mut mas) + .await + .into_mas("writing compat sessions")?; + + Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_devices() + .map_err(|e| e.into_synapse("reading devices")) + .forward(PollSender::new(tx).sink_map_err(|_| Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + .await; + + let (mas, state) = task.await.into_join("device write task")??; + + res?; + + info!( + "{} devices migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +/// Migrates unrefreshable access tokens (those without an associated refresh +/// token). Some of these may be deviceless. 
+#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_unrefreshable_access_tokens( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + clock: &dyn Clock, + rng: &mut impl RngCore, + mut state: MigrationState, + progress_counter: ProgressCounter, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel(100 * 1024); + + let now = clock.now(); + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let task = tokio::spawn( + async move { + let mut write_buffer = MasWriteBuffer::new(&mas); + let mut deviceless_session_write_buffer = MasWriteBuffer::new(&mas); + + while let Some(token) = rx.recv().await { + let SynapseAccessToken { + user_id: synapse_user_id, + device_id, + token, + valid_until_ms, + last_validated, + } = token; + let username = synapse_user_id + .extract_localpart(&state.server_name) + .into_extract_localpart(synapse_user_id.clone())? + .to_owned(); + let Some(user_infos) = state.users.get(username.as_str()).copied() else { + return Err(Error::MissingUserFromDependentTable { + table: "access_tokens".to_owned(), + user: synapse_user_id, + }); + }; + + let Some(mas_user_id) = user_infos.mas_user_id else { + progress_counter.increment_skipped(); + continue; + }; + + if user_infos.flags.is_deactivated() + || user_infos.flags.is_guest() + || user_infos.flags.is_appservice() + { + progress_counter.increment_skipped(); + continue; + } + + // It's not always accurate, but last_validated is *often* the creation time of + // the device If we don't have one, then use the current time as a + // fallback. 
+ let created_at = last_validated.map_or_else(|| now, DateTime::from); + + let session_id = if let Some(device_id) = device_id { + // Use the existing device_id if this is the second token for a device + *state + .devices_to_compat_sessions + .entry((mas_user_id, CompactString::new(&device_id))) + .or_insert_with(|| { + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)) + }) + } else { + // If this is a deviceless access token, create a deviceless compat session + // for it (since otherwise we won't create one whilst migrating devices) + let deviceless_session_id = + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)); + + deviceless_session_write_buffer + .write( + &mut mas, + MasNewCompatSession { + session_id: deviceless_session_id, + user_id: mas_user_id, + device_id: None, + human_name: None, + created_at, + is_synapse_admin: false, + last_active_at: None, + last_active_ip: None, + user_agent: None, + }, + ) + .await + .into_mas("failed to write deviceless compat sessions")?; + + deviceless_session_id + }; + + let token_id = + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)); + + write_buffer + .write( + &mut mas, + MasNewCompatAccessToken { + token_id, + session_id, + access_token: token, + created_at, + expires_at: valid_until_ms.map(DateTime::from), + }, + ) + .await + .into_mas("writing compat access tokens")?; + + progress_counter.increment_migrated(); + } + write_buffer + .finish(&mut mas) + .await + .into_mas("writing compat access tokens")?; + deviceless_session_write_buffer + .finish(&mut mas) + .await + .into_mas("writing deviceless compat sessions")?; + + Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_unrefreshable_access_tokens() + .map_err(|e| e.into_synapse("reading tokens")) + .forward(PollSender::new(tx).sink_map_err(|_| 
Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + .await; + + let (mas, state) = task.await.into_join("token write task")??; + + res?; + + info!( + "{} non-refreshable access tokens migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +/// Migrates (access token, refresh token) pairs. +/// Does not migrate non-refreshable access tokens. +#[tracing::instrument(skip_all, level = Level::INFO)] +async fn migrate_refreshable_token_pairs( + synapse: &mut SynapseReader<'_>, + mut mas: MasWriter, + clock: &dyn Clock, + rng: &mut impl RngCore, + mut state: MigrationState, + progress_counter: ProgressCounter, +) -> Result<(MasWriter, MigrationState), Error> { + let start = Instant::now(); + let progress_counter_ = progress_counter.clone(); + + let (tx, mut rx) = tokio::sync::mpsc::channel::(100 * 1024); + + // create a new RNG seeded from the passed RNG so that we can move it into the + // spawned task + let mut rng = rand_chacha::ChaChaRng::from_rng(rng).expect("failed to seed rng"); + let now = clock.now(); + let task = tokio::spawn( + async move { + let mut access_token_write_buffer = MasWriteBuffer::new(&mas); + let mut refresh_token_write_buffer = MasWriteBuffer::new(&mas); + + while let Some(token) = rx.recv().await { + let SynapseRefreshableTokenPair { + user_id: synapse_user_id, + device_id, + access_token, + refresh_token, + valid_until_ms, + last_validated, + } = token; + + let username = synapse_user_id + .extract_localpart(&state.server_name) + .into_extract_localpart(synapse_user_id.clone())? 
+ .to_owned(); + let Some(user_infos) = state.users.get(username.as_str()).copied() else { + return Err(Error::MissingUserFromDependentTable { + table: "refresh_tokens".to_owned(), + user: synapse_user_id, + }); + }; + + let Some(mas_user_id) = user_infos.mas_user_id else { + progress_counter.increment_skipped(); + continue; + }; + + if user_infos.flags.is_deactivated() + || user_infos.flags.is_guest() + || user_infos.flags.is_appservice() + { + progress_counter.increment_skipped(); + continue; + } + + // It's not always accurate, but last_validated is *often* the creation time of + // the device If we don't have one, then use the current time as a + // fallback. + let created_at = last_validated.map_or_else(|| now, DateTime::from); + + // Use the existing device_id if this is the second token for a device + let session_id = *state + .devices_to_compat_sessions + .entry((mas_user_id, CompactString::new(&device_id))) + .or_insert_with(|| { + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)) + }); + + let access_token_id = + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)); + let refresh_token_id = + Uuid::from(Ulid::from_datetime_with_source(created_at.into(), &mut rng)); + + access_token_write_buffer + .write( + &mut mas, + MasNewCompatAccessToken { + token_id: access_token_id, + session_id, + access_token, + created_at, + expires_at: valid_until_ms.map(DateTime::from), + }, + ) + .await + .into_mas("writing compat access tokens")?; + refresh_token_write_buffer + .write( + &mut mas, + MasNewCompatRefreshToken { + refresh_token_id, + session_id, + access_token_id, + refresh_token, + created_at, + }, + ) + .await + .into_mas("writing compat refresh tokens")?; + + progress_counter.increment_migrated(); + } + + access_token_write_buffer + .finish(&mut mas) + .await + .into_mas("writing compat access tokens")?; + + refresh_token_write_buffer + .finish(&mut mas) + .await + .into_mas("writing compat refresh tokens")?; + 
Ok((mas, state)) + } + .instrument(tracing::info_span!("ingest_task")), + ); + + // In case this has an error, we still want to join the task, so we look at the + // error later + let res = synapse + .read_refreshable_token_pairs() + .map_err(|e| e.into_synapse("reading refresh token pairs")) + .forward(PollSender::new(tx).sink_map_err(|_| Error::ChannelClosed)) + .inspect_err(|e| tracing::error!(error = e as &dyn std::error::Error)) + .await; + + let (mas, state) = task.await.into_join("refresh token write task")??; + + res?; + + info!( + "{} refreshable token pairs migrated ({} skipped) in {:.1}s", + progress_counter_.migrated(), + progress_counter_.skipped(), + Instant::now().duration_since(start).as_secs_f64() + ); + + Ok((mas, state)) +} + +fn transform_user( + user: &SynapseUser, + server_name: &str, + rng: &mut impl RngCore, +) -> Result<(MasNewUser, Option), Error> { + let username = user + .name + .extract_localpart(server_name) + .into_extract_localpart(user.name.clone())? + .to_owned(); + + let user_id = Uuid::from(Ulid::from_datetime_with_source( + DateTime::::from(user.creation_ts).into(), + rng, + )) + .try_into() + .expect("ULID generation lead to a nil UUID, this is a bug!"); + + let new_user = MasNewUser { + user_id, + username, + created_at: user.creation_ts.into(), + locked_at: user.locked.then_some(user.creation_ts.into()), + deactivated_at: bool::from(user.deactivated).then_some(user.creation_ts.into()), + can_request_admin: bool::from(user.admin), + is_guest: bool::from(user.is_guest), + }; + + let mas_password = user + .password_hash + .clone() + .map(|password_hash| MasNewUserPassword { + user_password_id: Uuid::from(Ulid::from_datetime_with_source( + DateTime::::from(user.creation_ts).into(), + rng, + )), + user_id: new_user.user_id, + hashed_password: password_hash, + created_at: new_user.created_at, + }); + + Ok((new_user, mas_password)) +} diff --git a/matrix-authentication-service/crates/syn2mas/src/progress.rs 
b/matrix-authentication-service/crates/syn2mas/src/progress.rs new file mode 100644 index 00000000..cdd7ab41 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/progress.rs @@ -0,0 +1,208 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::{Arc, LazyLock, atomic::AtomicU32}; + +use arc_swap::ArcSwap; +use opentelemetry::{ + KeyValue, + metrics::{Counter, Gauge}, +}; + +use crate::telemetry::METER; + +/// A gauge that tracks the approximate number of entities of a given type +/// that will be migrated. +pub static APPROX_TOTAL_GAUGE: LazyLock> = LazyLock::new(|| { + METER + .u64_gauge("syn2mas.entity.approx_total") + .with_description("Approximate number of entities of this type to be migrated") + .build() +}); + +/// A counter that tracks the number of entities of a given type that have +/// been migrated so far. +pub static MIGRATED_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("syn2mas.entity.migrated") + .with_description("Number of entities of this type that have been migrated so far") + .build() +}); + +/// A counter that tracks the number of entities of a given type that have +/// been skipped so far. +pub static SKIPPED_COUNTER: LazyLock> = LazyLock::new(|| { + METER + .u64_counter("syn2mas.entity.skipped") + .with_description("Number of entities of this type that have been skipped so far") + .build() +}); + +/// Enum representing the different types of entities that syn2mas can migrate. 
+#[derive(Debug, Clone, Copy)] +pub enum EntityType { + /// Represents users + Users, + + /// Represents devices + Devices, + + /// Represents third-party IDs + ThreePids, + + /// Represents external IDs + ExternalIds, + + /// Represents non-refreshable access tokens + NonRefreshableAccessTokens, + + /// Represents refreshable access tokens + RefreshableTokens, +} + +impl std::fmt::Display for EntityType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl EntityType { + pub const fn name(self) -> &'static str { + match self { + Self::Users => "users", + Self::Devices => "devices", + Self::ThreePids => "threepids", + Self::ExternalIds => "external_ids", + Self::NonRefreshableAccessTokens => "nonrefreshable_access_tokens", + Self::RefreshableTokens => "refreshable_tokens", + } + } + + pub fn as_kv(self) -> KeyValue { + KeyValue::new("entity", self.name()) + } +} + +/// Tracker for the progress of the migration +/// +/// Cloning this struct intuitively gives a 'handle' to the same counters, +/// which means it can be shared between tasks/threads. 
+#[derive(Clone)] +pub struct Progress { + current_stage: Arc>, +} + +#[derive(Clone)] +pub struct ProgressCounter { + inner: Arc, +} + +struct ProgressCounterInner { + kv: [KeyValue; 1], + migrated: AtomicU32, + skipped: AtomicU32, +} + +impl ProgressCounter { + fn new(entity: EntityType) -> Self { + Self { + inner: Arc::new(ProgressCounterInner { + kv: [entity.as_kv()], + migrated: AtomicU32::new(0), + skipped: AtomicU32::new(0), + }), + } + } + + pub fn increment_migrated(&self) { + MIGRATED_COUNTER.add(1, &self.inner.kv); + self.inner + .migrated + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + } + + pub fn increment_skipped(&self) { + SKIPPED_COUNTER.add(1, &self.inner.kv); + self.inner + .skipped + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + } + + #[must_use] + pub fn migrated(&self) -> u32 { + self.inner + .migrated + .load(std::sync::atomic::Ordering::Relaxed) + } + + #[must_use] + pub fn skipped(&self) -> u32 { + self.inner + .skipped + .load(std::sync::atomic::Ordering::Relaxed) + } +} + +impl Progress { + #[must_use] + pub fn migrating_data(&self, entity: EntityType, approx_count: usize) -> ProgressCounter { + let counter = ProgressCounter::new(entity); + APPROX_TOTAL_GAUGE.record(approx_count as u64, &[entity.as_kv()]); + self.set_current_stage(ProgressStage::MigratingData { + entity, + counter: counter.clone(), + approx_count: approx_count as u64, + }); + counter + } + + pub fn rebuild_index(&self, index_name: String) { + self.set_current_stage(ProgressStage::RebuildIndex { index_name }); + } + + pub fn rebuild_constraint(&self, constraint_name: String) { + self.set_current_stage(ProgressStage::RebuildConstraint { constraint_name }); + } + + /// Sets the current stage of progress. + /// + /// This is probably not cheap enough to use for every individual row, + /// so use of atomic integers for the fields that will be updated is + /// recommended. 
+ #[inline] + fn set_current_stage(&self, stage: ProgressStage) { + self.current_stage.store(Arc::new(stage)); + } + + /// Returns the current stage of progress. + #[inline] + #[must_use] + pub fn get_current_stage(&self) -> arc_swap::Guard> { + self.current_stage.load() + } +} + +impl Default for Progress { + fn default() -> Self { + Self { + current_stage: Arc::new(ArcSwap::new(Arc::new(ProgressStage::SettingUp))), + } + } +} + +pub enum ProgressStage { + SettingUp, + MigratingData { + entity: EntityType, + counter: ProgressCounter, + approx_count: u64, + }, + RebuildIndex { + index_name: String, + }, + RebuildConstraint { + constraint_name: String, + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/checks.rs b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/checks.rs new file mode 100644 index 00000000..c50f1e9f --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/checks.rs @@ -0,0 +1,351 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! # Synapse Checks +//! +//! This module provides safety checks to run against a Synapse database before +//! running the Synapse-to-MAS migration. 
+ +use figment::Figment; +use mas_config::{ + BrandingConfig, CaptchaConfig, ConfigurationSection, ConfigurationSectionExt, MatrixConfig, + PasswordAlgorithm, PasswordsConfig, UpstreamOAuth2Config, +}; +use sqlx::{PgConnection, prelude::FromRow, query_as, query_scalar}; +use thiserror::Error; + +use super::config::Config; +use crate::mas_writer::MIGRATED_PASSWORD_VERSION; + +#[derive(Debug, Error)] +pub enum Error { + #[error("query failed: {0}")] + Sqlx(#[from] sqlx::Error), + + #[error("failed to load MAS config: {0}")] + MasConfig(#[source] Box), + + #[error("failed to load MAS password config: {0}")] + MasPasswordConfig(#[source] anyhow::Error), +} + +/// An error found whilst checking the Synapse database, that should block a +/// migration. +#[derive(Debug, Error)] +pub enum CheckError { + #[error("MAS config is missing a password hashing scheme with version '1'")] + MissingPasswordScheme, + + #[error( + "Password scheme version '1' in the MAS config must use the Bcrypt algorithm, so that Synapse passwords can be imported and will be compatible." + )] + PasswordSchemeNotBcrypt, + + #[error( + "Password scheme version '1' in the MAS config must have the same secret as the `pepper` value from Synapse, so that Synapse passwords can be imported and will be compatible." + )] + PasswordSchemeWrongPepper, + + #[error( + "Guest support is enabled in the Synapse configuration. Guests aren't supported by MAS, but if you don't have any then you could disable the option. See https://github.com/element-hq/matrix-authentication-service/issues/1445" + )] + GuestsEnabled, + + #[error( + "Synapse config has `enable_3pid_changes` explicitly enabled, which must be disabled or removed." + )] + ThreepidChangesEnabled, + + #[error( + "Synapse config has `login_via_existing_session.enabled` set to true, which must be disabled." 
+ )] + LoginViaExistingSessionEnabled, + + #[error( + "MAS configuration has the wrong `matrix.homeserver` set ({mas:?}), it should match Synapse's `server_name` ({synapse:?})" + )] + ServerNameMismatch { synapse: String, mas: String }, + + #[error( + "Synapse database contains {num_users} users associated to the OpenID Connect or OAuth2 provider '{provider}' but the Synapse configuration does not contain this provider." + )] + SynapseMissingOAuthProvider { provider: String, num_users: i64 }, + + #[error( + "Synapse database has {num_users} mapping entries from a previously-configured MAS instance. If this is from a previous migration attempt, run the following SQL query against the Synapse database: `DELETE FROM user_external_ids WHERE auth_provider = 'oauth-delegated';` and then run the migration again." + )] + ExistingOAuthDelegated { num_users: i64 }, + + #[error( + "Synapse config contains an OpenID Connect or OAuth2 provider '{provider}' (issuer: {issuer:?}) used by {num_users} users which must also be configured in the MAS configuration as an upstream provider." + )] + MasMissingOAuthProvider { + provider: String, + issuer: String, + num_users: i64, + }, +} + +/// A potential hazard found whilst checking the Synapse database, that should +/// be presented to the operator to check they are aware of a caveat before +/// proceeding with the migration. +#[derive(Debug, Error)] +pub enum CheckWarning { + #[error( + "Synapse config contains OIDC auth configuration (issuer: {issuer:?}) which will need to be manually mapped to an upstream OpenID Connect Provider during migration." + )] + UpstreamOidcProvider { issuer: String }, + + #[error( + "Synapse config contains {0} auth configuration which will need to be manually mapped as an upstream OAuth 2.0 provider during migration." + )] + ExternalAuthSystem(&'static str), + + #[error( + "Synapse config has registration enabled. This must be disabled after migration before bringing Synapse back online." 
+ )] + DisableRegistrationAfterMigration, + + #[error("Synapse config has `user_consent` enabled. This should be disabled after migration.")] + DisableUserConsentAfterMigration, + + #[error( + "Synapse config has `user_consent` enabled but MAS has not been configured with terms of service. You may wish to set up a `tos_uri` in your MAS branding configuration to replace the user consent." + )] + ShouldPortUserConsentAsTerms, + + #[error( + "Synapse config has a registration CAPTCHA enabled, but no CAPTCHA has been configured in MAS. You may wish to manually configure this." + )] + ShouldPortRegistrationCaptcha, + + #[error( + "Synapse database contains {num_guests} guests which will be migrated are not supported by MAS. See https://github.com/element-hq/matrix-authentication-service/issues/1445" + )] + GuestsInDatabase { num_guests: i64 }, + + #[error( + "Synapse database contains {num_non_email_3pids} non-email 3PIDs (probably phone numbers), which will be migrated but are not supported by MAS." + )] + NonEmailThreepidsInDatabase { num_non_email_3pids: i64 }, + + #[error( + "Synapse database contains {num_users} users associated to the OpenID Connect or OAuth2 provider '{provider}' but the Synapse configuration does not contain this provider." + )] + SynapseMissingOAuthProvider { provider: String, num_users: i64 }, +} + +/// Check that the Synapse configuration is sane for migration. 
+#[must_use] +pub fn synapse_config_check(synapse_config: &Config) -> (Vec, Vec) { + let mut errors = Vec::new(); + let mut warnings = Vec::new(); + + if synapse_config.enable_registration { + warnings.push(CheckWarning::DisableRegistrationAfterMigration); + } + if synapse_config.user_consent.is_some() { + warnings.push(CheckWarning::DisableUserConsentAfterMigration); + } + + // TODO provide guidance on migrating these auth systems + // that are not directly supported as upstreams in MAS + if synapse_config.cas_config.enabled { + warnings.push(CheckWarning::ExternalAuthSystem("CAS")); + } + if synapse_config.saml2_config.enabled { + warnings.push(CheckWarning::ExternalAuthSystem("SAML2")); + } + if synapse_config.jwt_config.enabled { + warnings.push(CheckWarning::ExternalAuthSystem("JWT")); + } + if synapse_config.password_config.enabled && !synapse_config.password_config.localdb_enabled { + warnings.push(CheckWarning::ExternalAuthSystem( + "non-standard password provider plugin", + )); + } + + if synapse_config.enable_3pid_changes == Some(true) { + errors.push(CheckError::ThreepidChangesEnabled); + } + + if synapse_config.login_via_existing_session.enabled { + errors.push(CheckError::LoginViaExistingSessionEnabled); + } + + (warnings, errors) +} + +/// Check that the given Synapse configuration is sane for migration to a MAS +/// with the given MAS configuration. +/// +/// # Errors +/// +/// - If any necessary section of MAS config cannot be parsed. +/// - If the MAS password configuration (including any necessary secrets) can't +/// be loaded. 
+pub async fn synapse_config_check_against_mas_config( + synapse: &Config, + mas: &Figment, +) -> Result<(Vec, Vec), Error> { + let mut errors = Vec::new(); + let mut warnings = Vec::new(); + + let mas_passwords = PasswordsConfig::extract_or_default(mas).map_err(Error::MasConfig)?; + let mas_password_schemes = mas_passwords + .load() + .await + .map_err(Error::MasPasswordConfig)?; + + let mas_matrix = MatrixConfig::extract(mas).map_err(Error::MasConfig)?; + + // Look for the MAS password hashing scheme that will be used for imported + // Synapse passwords, then check the configuration matches so that Synapse + // passwords will be compatible with MAS. + if let Some((_, algorithm, _, secret, _)) = mas_password_schemes + .iter() + .find(|(version, _, _, _, _)| *version == MIGRATED_PASSWORD_VERSION) + { + if algorithm != &PasswordAlgorithm::Bcrypt { + errors.push(CheckError::PasswordSchemeNotBcrypt); + } + + let synapse_pepper = synapse + .password_config + .pepper + .as_ref() + .map(String::as_bytes); + if secret.as_deref() != synapse_pepper { + errors.push(CheckError::PasswordSchemeWrongPepper); + } + } else { + errors.push(CheckError::MissingPasswordScheme); + } + + if synapse.allow_guest_access { + errors.push(CheckError::GuestsEnabled); + } + + if synapse.server_name != mas_matrix.homeserver { + errors.push(CheckError::ServerNameMismatch { + synapse: synapse.server_name.clone(), + mas: mas_matrix.homeserver.clone(), + }); + } + + let mas_captcha = CaptchaConfig::extract_or_default(mas).map_err(Error::MasConfig)?; + if synapse.enable_registration_captcha && mas_captcha.service.is_none() { + warnings.push(CheckWarning::ShouldPortRegistrationCaptcha); + } + + let mas_branding = BrandingConfig::extract_or_default(mas).map_err(Error::MasConfig)?; + if synapse.user_consent.is_some() && mas_branding.tos_uri.is_none() { + warnings.push(CheckWarning::ShouldPortUserConsentAsTerms); + } + + Ok((warnings, errors)) +} + +/// Check that the Synapse database is sane for 
migration. Returns a list of +/// warnings and errors. +/// +/// # Errors +/// +/// - If there is some database connection error, or the given database is not a +/// Synapse database. +/// - If the Upstream OAuth section of the MAS configuration could not be +/// parsed. +#[tracing::instrument(skip_all)] +pub async fn synapse_database_check( + synapse_connection: &mut PgConnection, + synapse: &Config, + mas: &Figment, + ignore_missing_auth_providers: bool, +) -> Result<(Vec, Vec), Error> { + #[derive(FromRow)] + struct UpstreamOAuthProvider { + auth_provider: String, + num_users: i64, + } + + let mut errors = Vec::new(); + let mut warnings = Vec::new(); + + let num_guests: i64 = query_scalar("SELECT COUNT(1) FROM users WHERE is_guest <> 0") + .fetch_one(&mut *synapse_connection) + .await?; + if num_guests > 0 { + warnings.push(CheckWarning::GuestsInDatabase { num_guests }); + } + + let num_non_email_3pids: i64 = + query_scalar("SELECT COUNT(1) FROM user_threepids WHERE medium <> 'email'") + .fetch_one(&mut *synapse_connection) + .await?; + if num_non_email_3pids > 0 { + warnings.push(CheckWarning::NonEmailThreepidsInDatabase { + num_non_email_3pids, + }); + } + + let oauth_provider_user_counts = query_as::<_, UpstreamOAuthProvider>( + " + SELECT auth_provider, COUNT(*) AS num_users + FROM user_external_ids + GROUP BY auth_provider + ORDER BY auth_provider + ", + ) + .fetch_all(&mut *synapse_connection) + .await?; + if !oauth_provider_user_counts.is_empty() { + let syn_oauth2 = synapse.all_oidc_providers(); + let mas_oauth2 = UpstreamOAuth2Config::extract_or_default(mas).map_err(Error::MasConfig)?; + for row in oauth_provider_user_counts { + // This is a special case of a previous migration attempt to MAS + if row.auth_provider == "oauth-delegated" { + errors.push(CheckError::ExistingOAuthDelegated { + num_users: row.num_users, + }); + continue; + } + + let matching_syn = syn_oauth2.get(&row.auth_provider); + + let Some(matching_syn) = matching_syn else { + if 
ignore_missing_auth_providers { + warnings.push(CheckWarning::SynapseMissingOAuthProvider { + provider: row.auth_provider, + num_users: row.num_users, + }); + } else { + errors.push(CheckError::SynapseMissingOAuthProvider { + provider: row.auth_provider, + num_users: row.num_users, + }); + } + continue; + }; + + // Matching by `synapse_idp_id` is the same as what we'll do for the migration + let matching_mas = mas_oauth2.providers.iter().find(|mas_provider| { + mas_provider.synapse_idp_id.as_ref() == Some(&row.auth_provider) + }); + + if matching_mas.is_none() { + errors.push(CheckError::MasMissingOAuthProvider { + provider: row.auth_provider, + issuer: matching_syn + .issuer + .clone() + .unwrap_or("".to_owned()), + num_users: row.num_users, + }); + } + } + } + + Ok((warnings, errors)) +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/mod.rs b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/mod.rs new file mode 100644 index 00000000..3c9454ba --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/mod.rs @@ -0,0 +1,425 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +mod oidc; + +use std::collections::BTreeMap; + +use camino::Utf8PathBuf; +use chrono::{DateTime, Utc}; +use figment::providers::{Format, Yaml}; +use mas_config::{PasswordAlgorithm, PasswordHashingScheme}; +use rand::Rng; +use serde::Deserialize; +use sqlx::postgres::PgConnectOptions; +use tracing::warn; +use url::Url; + +pub use self::oidc::OidcProvider; + +/// The root of a Synapse configuration. +/// This struct only includes fields which the Synapse-to-MAS migration is +/// interested in. 
+/// +/// See: +#[derive(Deserialize)] +#[expect(clippy::struct_excessive_bools)] +pub struct Config { + pub database: DatabaseSection, + + #[serde(default)] + pub password_config: PasswordSection, + + pub bcrypt_rounds: Option, + + #[serde(default)] + pub allow_guest_access: bool, + + #[serde(default)] + pub enable_registration: bool, + + #[serde(default)] + pub enable_registration_captcha: bool, + pub recaptcha_public_key: Option, + pub recaptcha_private_key: Option, + + /// Normally this defaults to true, but when MAS integration is enabled in + /// Synapse it defaults to false. + #[serde(default)] + pub enable_3pid_changes: Option, + + #[serde(default = "default_true")] + enable_set_display_name: bool, + + #[serde(default)] + pub user_consent: Option, + + #[serde(default)] + pub registrations_require_3pid: Vec, + + #[serde(default)] + pub registration_requires_token: bool, + + pub registration_shared_secret: Option, + + #[serde(default)] + pub login_via_existing_session: EnableableSection, + + #[serde(default)] + pub cas_config: EnableableSection, + + #[serde(default)] + pub saml2_config: EnableableSection, + + #[serde(default)] + pub jwt_config: EnableableSection, + + #[serde(default)] + pub oidc_config: Option, + + #[serde(default)] + pub oidc_providers: Vec, + + pub server_name: String, + + pub public_baseurl: Option, +} + +impl Config { + /// Load a Synapse configuration from the given list of configuration files. + /// + /// # Errors + /// + /// - If there is a problem reading any of the files. + /// - If the configuration is not valid. + pub fn load( + files: &[Utf8PathBuf], + ) -> Result> { + let mut figment = figment::Figment::new(); + for file in files { + // TODO this is not exactly correct behaviour — Synapse does not merge anything + // other than the top level dict. 
+ // https://github.com/element-hq/matrix-authentication-service/pull/3805#discussion_r1922680825 + // https://github.com/element-hq/synapse/blob/develop/synapse/config/_base.py?rgh-link-date=2025-01-20T17%3A02%3A56Z#L870 + figment = figment.merge(Yaml::file(file)); + } + let config = figment.extract::()?; + Ok(config) + } + + /// Returns a map of all OIDC providers from the Synapse configuration. + /// + /// The keys are the `auth_provider` IDs as they would have been stored in + /// Synapse's database. + /// + /// These are compatible with the `synapse_idp_id` field of + /// [`mas_config::UpstreamOAuth2Provider`]. + #[must_use] + pub fn all_oidc_providers(&self) -> BTreeMap { + let mut out = BTreeMap::new(); + + if let Some(provider) = &self.oidc_config + && provider.has_required_fields() + { + let mut provider = provider.clone(); + // The legacy configuration has an implied IdP ID of `oidc`. + let idp_id = provider.idp_id.take().unwrap_or("oidc".to_owned()); + provider.idp_id = Some(idp_id.clone()); + out.insert(idp_id, provider); + } + + for provider in &self.oidc_providers { + let mut provider = provider.clone(); + let idp_id = match provider.idp_id.take() { + None => "oidc".to_owned(), + Some(idp_id) if idp_id == "oidc" => idp_id, + // Synapse internally prefixes the IdP IDs with `oidc-`. + Some(idp_id) => format!("oidc-{idp_id}"), + }; + provider.idp_id = Some(idp_id.clone()); + out.insert(idp_id, provider); + } + + out + } + + /// Adjust a MAS configuration to match this Synapse configuration. 
+ #[must_use] + pub fn adjust_mas_config( + self, + mut mas_config: mas_config::RootConfig, + rng: &mut impl Rng, + now: DateTime, + ) -> mas_config::RootConfig { + let providers = self.all_oidc_providers(); + for provider in providers.into_values() { + let Some(mas_provider_config) = provider.into_mas_config(rng, now) else { + // TODO: better log message + warn!("Could not convert OIDC provider to MAS config"); + continue; + }; + + mas_config + .upstream_oauth2 + .providers + .push(mas_provider_config); + } + + // TODO: manage when the option is not set + if let Some(enable_3pid_changes) = self.enable_3pid_changes { + mas_config.account.email_change_allowed = enable_3pid_changes; + } + mas_config.account.displayname_change_allowed = self.enable_set_display_name; + if self.password_config.enabled { + mas_config.passwords.enabled = true; + mas_config.passwords.schemes = vec![ + // This is the password hashing scheme synapse uses + PasswordHashingScheme { + version: 1, + algorithm: PasswordAlgorithm::Bcrypt, + cost: self.bcrypt_rounds, + secret: self.password_config.pepper, + secret_file: None, + unicode_normalization: true, + }, + // Use the default algorithm MAS uses as a second hashing scheme, so that users + // will get their password hash upgraded to a more modern algorithm over time + PasswordHashingScheme { + version: 2, + algorithm: PasswordAlgorithm::default(), + cost: None, + secret: None, + secret_file: None, + unicode_normalization: false, + }, + ]; + + mas_config.account.password_registration_enabled = self.enable_registration; + } else { + mas_config.passwords.enabled = false; + } + + if self.enable_registration_captcha { + mas_config.captcha.service = Some(mas_config::CaptchaServiceKind::RecaptchaV2); + mas_config.captcha.site_key = self.recaptcha_public_key; + mas_config.captcha.secret_key = self.recaptcha_private_key; + } + + mas_config.matrix.homeserver = self.server_name; + if let Some(public_baseurl) = self.public_baseurl { + 
mas_config.matrix.endpoint = public_baseurl; + } + + mas_config + } +} + +/// The `database` section of the Synapse configuration. +/// +/// See: +#[derive(Deserialize)] +pub struct DatabaseSection { + /// Expecting `psycopg2` for Postgres or `sqlite3` for `SQLite3`, but may be + /// an arbitrary string and future versions of Synapse may support other + /// database drivers, e.g. psycopg3. + pub name: String, + #[serde(default)] + pub args: DatabaseArgsSuboption, +} + +/// The database driver name for Synapse when it is using Postgres via psycopg2. +pub const SYNAPSE_DATABASE_DRIVER_NAME_PSYCOPG2: &str = "psycopg2"; +/// The database driver name for Synapse when it is using SQLite 3. +pub const SYNAPSE_DATABASE_DRIVER_NAME_SQLITE3: &str = "sqlite3"; + +impl DatabaseSection { + /// Process the configuration into Postgres connection options. + /// + /// Environment variables and libpq defaults will be used as fallback for + /// any missing values; this should match what Synapse does. + /// But note that if syn2mas is not run in the same context (host, user, + /// environment variables) as Synapse normally runs, then the connection + /// options may not be valid. + /// + /// # Errors + /// + /// Returns an error if this database configuration is invalid or + /// unsupported. + pub fn to_sqlx_postgres(&self) -> Result { + if self.name != SYNAPSE_DATABASE_DRIVER_NAME_PSYCOPG2 { + anyhow::bail!("syn2mas does not support the {} database driver", self.name); + } + + if self.args.database.is_some() && self.args.dbname.is_some() { + anyhow::bail!( + "Only one of `database` and `dbname` may be specified in the Synapse database configuration, not both." 
+ ); + } + + let mut opts = PgConnectOptions::new().application_name("syn2mas-synapse"); + + if let Some(host) = &self.args.host { + opts = opts.host(host); + } + if let Some(port) = self.args.port { + opts = opts.port(port); + } + if let Some(dbname) = &self.args.dbname { + opts = opts.database(dbname); + } + if let Some(database) = &self.args.database { + opts = opts.database(database); + } + if let Some(user) = &self.args.user { + opts = opts.username(user); + } + if let Some(password) = &self.args.password { + opts = opts.password(password); + } + + Ok(opts) + } +} + +/// The `args` suboption of the `database` section of the Synapse configuration. +/// This struct assumes Postgres is in use and does not represent fields used by +/// SQLite. +#[derive(Deserialize, Default)] +pub struct DatabaseArgsSuboption { + pub user: Option, + pub password: Option, + pub dbname: Option, + // This is a deperecated way of specifying the database name. + pub database: Option, + pub host: Option, + pub port: Option, +} + +/// The `password_config` section of the Synapse configuration. +/// +/// See: +#[derive(Deserialize)] +pub struct PasswordSection { + #[serde(default = "default_true")] + pub enabled: bool, + #[serde(default = "default_true")] + pub localdb_enabled: bool, + pub pepper: Option, +} + +impl Default for PasswordSection { + fn default() -> Self { + PasswordSection { + enabled: true, + localdb_enabled: true, + pepper: None, + } + } +} + +/// A section that we only care about whether it's enabled or not, but is not +/// enabled by default. 
+#[derive(Default, Deserialize)] +pub struct EnableableSection { + #[serde(default)] + pub enabled: bool, +} + +fn default_true() -> bool { + true +} + +#[cfg(test)] +mod test { + use sqlx::postgres::PgConnectOptions; + + use super::{DatabaseArgsSuboption, DatabaseSection}; + + #[test] + fn test_to_sqlx_postgres() { + #[track_caller] + #[expect(clippy::needless_pass_by_value)] + fn assert_eq_options(config: DatabaseSection, uri: &str) { + let config_connect_options = config + .to_sqlx_postgres() + .expect("no connection options generated by config"); + let uri_connect_options: PgConnectOptions = uri + .parse() + .expect("example URI did not parse as PgConnectionOptions"); + + assert_eq!( + config_connect_options.get_host(), + uri_connect_options.get_host() + ); + assert_eq!( + config_connect_options.get_port(), + uri_connect_options.get_port() + ); + assert_eq!( + config_connect_options.get_username(), + uri_connect_options.get_username() + ); + // The password is not public so we can't assert it. But that's hopefully fine. 
+ assert_eq!( + config_connect_options.get_database(), + uri_connect_options.get_database() + ); + } + + // SQLite configs are not accepted + assert!( + DatabaseSection { + name: "sqlite3".to_owned(), + args: DatabaseArgsSuboption::default(), + } + .to_sqlx_postgres() + .is_err() + ); + + // Only one of `database` and `dbname` may be specified + assert!( + DatabaseSection { + name: "psycopg2".to_owned(), + args: DatabaseArgsSuboption { + user: Some("synapse_user".to_owned()), + password: Some("verysecret".to_owned()), + dbname: Some("synapse_db".to_owned()), + database: Some("synapse_db".to_owned()), + host: Some("synapse-db.example.com".to_owned()), + port: Some(42), + }, + } + .to_sqlx_postgres() + .is_err() + ); + + assert_eq_options( + DatabaseSection { + name: "psycopg2".to_owned(), + args: DatabaseArgsSuboption::default(), + }, + "postgresql:///", + ); + assert_eq_options( + DatabaseSection { + name: "psycopg2".to_owned(), + args: DatabaseArgsSuboption { + user: Some("synapse_user".to_owned()), + password: Some("verysecret".to_owned()), + dbname: Some("synapse_db".to_owned()), + database: None, + host: Some("synapse-db.example.com".to_owned()), + port: Some(42), + }, + }, + "postgresql://synapse_user:verysecret@synapse-db.example.com:42/synapse_db", + ); + } +} + +/// We don't care about any of the fields in this section, +/// just whether it's present. +#[derive(Deserialize)] +pub struct UserConsentSection {} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/oidc.rs b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/oidc.rs new file mode 100644 index 00000000..09baba16 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/config/oidc.rs @@ -0,0 +1,352 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::BTreeMap, str::FromStr as _}; + +use chrono::{DateTime, Utc}; +use mas_config::{ + ClientSecret, UpstreamOAuth2ClaimsImports, UpstreamOAuth2DiscoveryMode, + UpstreamOAuth2ImportAction, UpstreamOAuth2OnBackchannelLogout, UpstreamOAuth2PkceMethod, + UpstreamOAuth2ResponseMode, UpstreamOAuth2TokenAuthMethod, +}; +use mas_iana::jose::JsonWebSignatureAlg; +use oauth2_types::scope::{OPENID, Scope, ScopeToken}; +use rand::Rng; +use serde::Deserialize; +use tracing::warn; +use ulid::Ulid; +use url::Url; + +#[derive(Clone, Deserialize, Default)] +enum UserMappingProviderModule { + #[default] + #[serde(rename = "synapse.handlers.oidc.JinjaOidcMappingProvider")] + Jinja, + + #[serde(rename = "synapse.handlers.oidc_handler.JinjaOidcMappingProvider")] + JinjaLegacy, + + #[serde(other)] + Other, +} + +#[derive(Clone, Deserialize, Default)] +struct UserMappingProviderConfig { + subject_template: Option, + subject_claim: Option, + localpart_template: Option, + display_name_template: Option, + email_template: Option, + + #[serde(default)] + confirm_localpart: bool, +} + +impl UserMappingProviderConfig { + fn into_mas_config(self) -> UpstreamOAuth2ClaimsImports { + let mut config = UpstreamOAuth2ClaimsImports::default(); + + match (self.subject_claim, self.subject_template) { + (Some(_), Some(subject_template)) => { + warn!( + "Both `subject_claim` and `subject_template` options are set, using `subject_template`." 
+ ); + config.subject.template = Some(subject_template); + } + (None, Some(subject_template)) => { + config.subject.template = Some(subject_template); + } + (Some(subject_claim), None) => { + config.subject.template = Some(format!("{{{{ user.{subject_claim} }}}}")); + } + (None, None) => {} + } + + if let Some(localpart_template) = self.localpart_template { + config.localpart.template = Some(localpart_template); + config.localpart.action = if self.confirm_localpart { + UpstreamOAuth2ImportAction::Suggest + } else { + UpstreamOAuth2ImportAction::Require + }; + } + + if let Some(displayname_template) = self.display_name_template { + config.displayname.template = Some(displayname_template); + config.displayname.action = if self.confirm_localpart { + UpstreamOAuth2ImportAction::Suggest + } else { + UpstreamOAuth2ImportAction::Force + }; + } + + if let Some(email_template) = self.email_template { + config.email.template = Some(email_template); + config.email.action = if self.confirm_localpart { + UpstreamOAuth2ImportAction::Suggest + } else { + UpstreamOAuth2ImportAction::Force + }; + } + + config + } +} + +#[derive(Clone, Deserialize, Default)] +struct UserMappingProvider { + #[serde(default)] + module: UserMappingProviderModule, + #[serde(default)] + config: UserMappingProviderConfig, +} + +#[derive(Clone, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +enum PkceMethod { + #[default] + Auto, + Always, + Never, + #[serde(other)] + Other, +} + +#[derive(Clone, Deserialize, Default)] +#[serde(rename_all = "snake_case")] +enum UserProfileMethod { + #[default] + Auto, + UserinfoEndpoint, + #[serde(other)] + Other, +} + +#[derive(Clone, Deserialize)] +#[expect(clippy::struct_excessive_bools)] +pub struct OidcProvider { + pub issuer: Option, + + /// Required, except for the old `oidc_config` where this is implied to be + /// "oidc". 
+ pub idp_id: Option, + + idp_name: Option, + idp_brand: Option, + + #[serde(default = "default_true")] + discover: bool, + + client_id: Option, + client_secret: Option, + + // Unsupported, we want to shout about it + client_secret_path: Option, + + // Unsupported, we want to shout about it + client_secret_jwt_key: Option, + client_auth_method: Option, + #[serde(default)] + pkce_method: PkceMethod, + // Unsupported, we want to shout about it + id_token_signing_alg_values_supported: Option>, + scopes: Option>, + authorization_endpoint: Option, + token_endpoint: Option, + userinfo_endpoint: Option, + jwks_uri: Option, + #[serde(default)] + skip_verification: bool, + + #[serde(default)] + backchannel_logout_enabled: bool, + + #[serde(default)] + user_profile_method: UserProfileMethod, + + // Unsupported, we want to shout about it + attribute_requirements: Option, + + // Unsupported, we want to shout about it + #[serde(default = "default_true")] + enable_registration: bool, + #[serde(default)] + additional_authorization_parameters: BTreeMap, + #[serde(default)] + forward_login_hint: bool, + #[serde(default)] + user_mapping_provider: UserMappingProvider, +} + +fn default_true() -> bool { + true +} + +impl OidcProvider { + /// Returns true if the two 'required' fields are set. This is used to + /// ignore an empty dict on the `oidc_config` section. + #[must_use] + pub(crate) fn has_required_fields(&self) -> bool { + self.issuer.is_some() && self.client_id.is_some() + } + + /// Map this Synapse OIDC provider config to a MAS upstream provider config. + pub(crate) fn into_mas_config( + self, + rng: &mut impl Rng, + now: DateTime, + ) -> Option { + let client_id = self.client_id?; + + if self.client_secret_path.is_some() { + warn!( + "The `client_secret_path` option is not supported, ignoring. You *will* need to include the secret in the `client_secret` field." 
+ ); + } + + if self.client_secret_jwt_key.is_some() { + warn!("The `client_secret_jwt_key` option is not supported, ignoring."); + } + + if self.attribute_requirements.is_some() { + warn!("The `attribute_requirements` option is not supported, ignoring."); + } + + if self.id_token_signing_alg_values_supported.is_some() { + warn!("The `id_token_signing_alg_values_supported` option is not supported, ignoring."); + } + + if !self.enable_registration { + warn!( + "Setting the `enable_registration` option to `false` is not supported, ignoring." + ); + } + + let scope: Scope = match self.scopes { + None => [OPENID].into_iter().collect(), // Synapse defaults to the 'openid' scope + Some(scopes) => scopes + .into_iter() + .filter_map(|scope| match ScopeToken::from_str(&scope) { + Ok(scope) => Some(scope), + Err(err) => { + warn!("OIDC provider scope '{scope}' is invalid: {err}"); + None + } + }) + .collect(), + }; + + let id = Ulid::from_datetime_with_source(now.into(), rng); + + let token_endpoint_auth_method = self.client_auth_method.unwrap_or_else(|| { + // The token auth method defaults to 'none' if no client_secret is set and + // 'client_secret_basic' otherwise + if self.client_secret.is_some() { + UpstreamOAuth2TokenAuthMethod::ClientSecretBasic + } else { + UpstreamOAuth2TokenAuthMethod::None + } + }); + + let discovery_mode = match (self.discover, self.skip_verification) { + (true, false) => UpstreamOAuth2DiscoveryMode::Oidc, + (true, true) => UpstreamOAuth2DiscoveryMode::Insecure, + (false, _) => UpstreamOAuth2DiscoveryMode::Disabled, + }; + + let pkce_method = match self.pkce_method { + PkceMethod::Auto => UpstreamOAuth2PkceMethod::Auto, + PkceMethod::Always => UpstreamOAuth2PkceMethod::Always, + PkceMethod::Never => UpstreamOAuth2PkceMethod::Never, + PkceMethod::Other => { + warn!( + "The `pkce_method` option is not supported, expected 'auto', 'always', or 'never'; assuming 'auto'." 
+ ); + UpstreamOAuth2PkceMethod::default() + } + }; + + // "auto" doesn't mean the same thing depending on whether we request the openid + // scope or not + let has_openid_scope = scope.contains(&OPENID); + let fetch_userinfo = match self.user_profile_method { + UserProfileMethod::Auto => has_openid_scope, + UserProfileMethod::UserinfoEndpoint => true, + UserProfileMethod::Other => { + warn!( + "The `user_profile_method` option is not supported, expected 'auto' or 'userinfo_endpoint'; assuming 'auto'." + ); + has_openid_scope + } + }; + + // Check if there is a `response_mode` set in the additional authorization + // parameters + let mut additional_authorization_parameters = self.additional_authorization_parameters; + let response_mode = if let Some(response_mode) = + additional_authorization_parameters.remove("response_mode") + { + match response_mode.to_ascii_lowercase().as_str() { + "query" => Some(UpstreamOAuth2ResponseMode::Query), + "form_post" => Some(UpstreamOAuth2ResponseMode::FormPost), + _ => { + warn!( + "Invalid `response_mode` in the `additional_authorization_parameters` option, expected 'query' or 'form_post'; ignoring." + ); + None + } + } + } else { + None + }; + + let claims_imports = if matches!( + self.user_mapping_provider.module, + UserMappingProviderModule::Other + ) { + warn!( + "The `user_mapping_provider` module specified is not supported, ignoring. Please adjust the `claims_imports` to match the mapping provider behaviour." 
+ ); + UpstreamOAuth2ClaimsImports::default() + } else { + self.user_mapping_provider.config.into_mas_config() + }; + + let on_backchannel_logout = if self.backchannel_logout_enabled { + UpstreamOAuth2OnBackchannelLogout::DoNothing + } else { + UpstreamOAuth2OnBackchannelLogout::LogoutBrowserOnly + }; + + Some(mas_config::UpstreamOAuth2Provider { + enabled: true, + id, + synapse_idp_id: self.idp_id, + issuer: self.issuer, + human_name: self.idp_name, + brand_name: self.idp_brand, + client_id, + client_secret: self.client_secret.map(ClientSecret::Value), + token_endpoint_auth_method, + sign_in_with_apple: None, + token_endpoint_auth_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + scope: scope.to_string(), + discovery_mode, + pkce_method, + fetch_userinfo, + userinfo_signed_response_alg: None, + authorization_endpoint: self.authorization_endpoint, + userinfo_endpoint: self.userinfo_endpoint, + token_endpoint: self.token_endpoint, + jwks_uri: self.jwks_uri, + response_mode, + claims_imports, + additional_authorization_parameters, + forward_login_hint: self.forward_login_hint, + on_backchannel_logout, + }) + } +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice.sql new file mode 100644 index 00000000..4926445b --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice.sql @@ -0,0 +1,19 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +INSERT INTO access_tokens + ( + id, + user_id, + device_id, + token + ) + VALUES + ( + 42, + '@alice:example.com', + 'ADEVICE', + 'syt_aaaaaaaaaaaaaa_aaaa' + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_puppet.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_puppet.sql new file mode 100644 index 00000000..6029f94f --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_puppet.sql @@ -0,0 +1,21 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +INSERT INTO access_tokens + ( + id, + user_id, + device_id, + token, + puppets_user_id + ) + VALUES + ( + 42, + '@alice:example.com', + NULL, + 'syt_pupupupupup_eett', + '@bob:example.com' + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_refresh_token.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_refresh_token.sql new file mode 100644 index 00000000..bba684b1 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_refresh_token.sql @@ -0,0 +1,61 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +INSERT INTO access_tokens + ( + id, + user_id, + device_id, + token, + refresh_token_id, + used + ) + VALUES + ( + 42, + '@alice:example.com', + 'ADEVICE', + 'syt_aaaaaaaaaaaaaa_aaaa', + 7, + TRUE + ), + ( + 43, + '@alice:example.com', + 'ADEVICE', + 'syt_AAAAAAAAAAAAAA_AAAA', + 8, + TRUE + ); + +INSERT INTO refresh_tokens + ( + id, + user_id, + device_id, + token, + next_token_id, + expiry_ts, + ultimate_session_expiry_ts + ) + VALUES + ( + 7, + '@alice:example.com', + 'ADEVICE', + 'syr_bbbbbbbbbbbbb_bbbb', + 8, + 1738096199000, + 1778096199000 + ), + ( + 8, + '@alice:example.com', + 'ADEVICE', + 'syr_cccccccccccc_cccc', + NULL, + 1748096199000, + 1778096199000 + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_unused_refresh_token.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_unused_refresh_token.sql new file mode 100644 index 00000000..e1de6b28 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/access_token_alice_with_unused_refresh_token.sql @@ -0,0 +1,61 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +INSERT INTO access_tokens + ( + id, + user_id, + device_id, + token, + refresh_token_id, + used + ) + VALUES + ( + 42, + '@alice:example.com', + 'ADEVICE', + 'syt_aaaaaaaaaaaaaa_aaaa', + 7, + TRUE + ), + ( + 43, + '@alice:example.com', + 'ADEVICE', + 'syt_AAAAAAAAAAAAAA_AAAA', + 8, + FALSE + ); + +INSERT INTO refresh_tokens + ( + id, + user_id, + device_id, + token, + next_token_id, + expiry_ts, + ultimate_session_expiry_ts + ) + VALUES + ( + 7, + '@alice:example.com', + 'ADEVICE', + 'syr_bbbbbbbbbbbbb_bbbb', + 8, + 1738096199000, + 1778096199000 + ), + ( + 8, + '@alice:example.com', + 'ADEVICE', + 'syr_cccccccccccc_cccc', + NULL, + 1748096199000, + 1778096199000 + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/devices_alice.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/devices_alice.sql new file mode 100644 index 00000000..411c6ba3 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/devices_alice.sql @@ -0,0 +1,43 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +INSERT INTO devices + ( + user_id, + device_id, + display_name, + last_seen, + ip, + user_agent, + hidden + ) + VALUES + ( + '@alice:example.com', + 'ADEVICE', + 'Matrix Console', + 1623366000000, + '203.0.113.1', + 'Browser/5.0 (X12; ComputerOS 64; rv:1024.0)', + FALSE + ), + ( + '@alice:example.com', + 'master signing key', + NULL, + NULL, + NULL, + NULL, + TRUE + ), + ( + '@alice:example.com', + 'self_signing signing key', + NULL, + NULL, + NULL, + NULL, + TRUE + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/external_ids_alice.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/external_ids_alice.sql new file mode 100644 index 00000000..651f03cf --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/external_ids_alice.sql @@ -0,0 +1,17 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +INSERT INTO user_external_ids + ( + user_id, + auth_provider, + external_id + ) + VALUES + ( + '@alice:example.com', + 'oidc-raasu', + '871.syn30' + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/threepids_alice.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/threepids_alice.sql new file mode 100644 index 00000000..5b643a46 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/threepids_alice.sql @@ -0,0 +1,28 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +INSERT INTO user_threepids + ( + user_id, + medium, + address, + validated_at, + added_at + ) + VALUES + ( + '@alice:example.com', + 'email', + 'alice@example.com', + 1554228492026, + 1554228549014 + ), + ( + '@alice:example.com', + 'msisdn', + '441189998819991197253', + 1555228492026, + 1555228549014 + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/user_alice.sql b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/user_alice.sql new file mode 100644 index 00000000..82521340 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/fixtures/user_alice.sql @@ -0,0 +1,43 @@ +-- Copyright 2024, 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +INSERT INTO users + ( + name, + password_hash, + creation_ts, + admin, + upgrade_ts, + is_guest, + appservice_id, + consent_version, + consent_server_notice_sent, + user_type, + deactivated, + shadow_banned, + consent_ts, + approved, + locked, + suspended + ) + VALUES + ( + '@alice:example.com', + '$2b$12$aaa/aaaaaaaaaa.aaaaaaaaaaaaaaa./aaaaaaaaaaaaaaaaaaa/A', + 1530393962, + 0, + NULL, + 0, + NULL, + '1.0', + '1.0', + NULL, + 0, + NULL, + NULL, + NULL, + false, + false + ); diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/mod.rs b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/mod.rs new file mode 100644 index 00000000..b5b69149 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/mod.rs @@ -0,0 +1,730 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! # Synapse Database Reader +//! +//! This module provides facilities for streaming relevant types of database +//! records from a Synapse database. 
+ +use std::fmt::Display; + +use chrono::{DateTime, Utc}; +use futures_util::{Stream, TryStreamExt}; +use sqlx::{Acquire, FromRow, PgConnection, Postgres, Transaction, Type, query}; +use thiserror::Error; +use thiserror_ext::ContextInto; + +pub mod checks; +pub mod config; + +#[derive(Debug, Error, ContextInto)] +pub enum Error { + #[error("database error whilst {context}")] + Database { + #[source] + source: sqlx::Error, + context: String, + }, +} + +#[derive(Clone, Debug, sqlx::Decode, PartialEq, Eq, PartialOrd, Ord)] +pub struct FullUserId(pub String); + +impl Display for FullUserId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl Type for FullUserId { + fn type_info() -> ::TypeInfo { + >::type_info() + } +} + +#[derive(Debug, Error)] +pub enum ExtractLocalpartError { + #[error("user ID does not start with `@` sigil")] + NoAtSigil, + #[error("user ID does not have a `:` separator")] + NoSeparator, + #[error("wrong server name: expected {expected:?}, got {found:?}")] + WrongServerName { expected: String, found: String }, +} + +impl FullUserId { + /// Extract the localpart from the User ID, asserting that the User ID has + /// the correct server name. + /// + /// # Errors + /// + /// A handful of basic validity checks are performed and an error may be + /// returned if the User ID is not valid. + /// However, the User ID grammar is not checked fully. + /// + /// If the wrong server name is asserted, returns an error. 
+ pub fn extract_localpart( + &self, + expected_server_name: &str, + ) -> Result<&str, ExtractLocalpartError> { + let Some(without_sigil) = self.0.strip_prefix('@') else { + return Err(ExtractLocalpartError::NoAtSigil); + }; + + let Some((localpart, server_name)) = without_sigil.split_once(':') else { + return Err(ExtractLocalpartError::NoSeparator); + }; + + if server_name != expected_server_name { + return Err(ExtractLocalpartError::WrongServerName { + expected: expected_server_name.to_owned(), + found: server_name.to_owned(), + }); + } + + Ok(localpart) + } +} + +/// A Synapse boolean. +/// Synapse stores booleans as 0 or 1, due to compatibility with old SQLite +/// versions that did not have native boolean support. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseBool(bool); + +impl<'r> sqlx::Decode<'r, Postgres> for SynapseBool { + fn decode( + value: ::ValueRef<'r>, + ) -> Result { + >::decode(value) + .map(|boolean_int| SynapseBool(boolean_int != 0)) + } +} + +impl sqlx::Type for SynapseBool { + fn type_info() -> ::TypeInfo { + >::type_info() + } +} + +impl From for bool { + fn from(SynapseBool(value): SynapseBool) -> Self { + value + } +} + +/// A timestamp stored as the number of seconds since the Unix epoch. +/// Note that Synapse stores MOST timestamps as numbers of **milliseconds** +/// since the Unix epoch. But some timestamps are still stored in seconds. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct SecondsTimestamp(DateTime); + +impl From for DateTime { + fn from(SecondsTimestamp(value): SecondsTimestamp) -> Self { + value + } +} + +impl<'r> sqlx::Decode<'r, Postgres> for SecondsTimestamp { + fn decode( + value: ::ValueRef<'r>, + ) -> Result { + >::decode(value).map(|seconds_since_epoch| { + SecondsTimestamp(DateTime::from_timestamp_nanos( + seconds_since_epoch * 1_000_000_000, + )) + }) + } +} + +impl sqlx::Type for SecondsTimestamp { + fn type_info() -> ::TypeInfo { + >::type_info() + } +} + +/// A timestamp stored as the number of milliseconds since the Unix epoch. +/// Note that Synapse stores some timestamps in seconds. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct MillisecondsTimestamp(DateTime); + +impl From for DateTime { + fn from(MillisecondsTimestamp(value): MillisecondsTimestamp) -> Self { + value + } +} + +impl<'r> sqlx::Decode<'r, Postgres> for MillisecondsTimestamp { + fn decode( + value: ::ValueRef<'r>, + ) -> Result { + >::decode(value).map(|milliseconds_since_epoch| { + MillisecondsTimestamp(DateTime::from_timestamp_nanos( + milliseconds_since_epoch * 1_000_000, + )) + }) + } +} + +impl sqlx::Type for MillisecondsTimestamp { + fn type_info() -> ::TypeInfo { + >::type_info() + } +} + +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseUser { + /// Full User ID of the user + pub name: FullUserId, + /// Password hash string for the user. Optional (null if no password is + /// set). + pub password_hash: Option, + /// Whether the user is a Synapse Admin + pub admin: SynapseBool, + /// Whether the user is deactivated + pub deactivated: SynapseBool, + /// Whether the user is locked + pub locked: bool, + /// When the user was created + pub creation_ts: SecondsTimestamp, + /// Whether the user is a guest. + /// Note that not all numeric user IDs are guests; guests can upgrade their + /// account! 
+ pub is_guest: SynapseBool, + /// The ID of the appservice that created this user, if any. + pub appservice_id: Option, +} + +/// Row of the `user_threepids` table in Synapse. +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseThreepid { + pub user_id: FullUserId, + pub medium: String, + pub address: String, + pub added_at: MillisecondsTimestamp, +} + +/// Row of the `user_external_ids` table in Synapse. +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseExternalId { + pub user_id: FullUserId, + pub auth_provider: String, + pub external_id: String, +} + +/// Row of the `devices` table in Synapse. +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseDevice { + pub user_id: FullUserId, + pub device_id: String, + pub display_name: Option, + pub last_seen: Option, + pub ip: Option, + pub user_agent: Option, +} + +/// Row of the `access_tokens` table in Synapse. +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseAccessToken { + pub user_id: FullUserId, + pub device_id: Option, + pub token: String, + pub valid_until_ms: Option, + pub last_validated: Option, +} + +/// Row of the `refresh_tokens` table in Synapse. +#[derive(Clone, Debug, FromRow, PartialEq, Eq, PartialOrd, Ord)] +pub struct SynapseRefreshableTokenPair { + pub user_id: FullUserId, + pub device_id: String, + pub access_token: String, + pub refresh_token: String, + pub valid_until_ms: Option, + pub last_validated: Option, +} + +/// List of Synapse tables that we should acquire an `EXCLUSIVE` lock on. +/// +/// This is a safety measure against other processes changing the data +/// underneath our feet. It's still not a good idea to run Synapse at the same +/// time as the migration. 
+const TABLES_TO_LOCK: &[&str] = &[ + "users", + "user_threepids", + "user_external_ids", + "devices", + "access_tokens", + "refresh_tokens", +]; + +/// Number of migratable rows in various Synapse tables. +/// Used to estimate progress. +#[derive(Clone, Debug)] +pub struct SynapseRowCounts { + pub users: usize, + pub devices: usize, + pub threepids: usize, + pub external_ids: usize, + pub access_tokens: usize, + pub refresh_tokens: usize, +} + +pub struct SynapseReader<'c> { + txn: Transaction<'c, Postgres>, +} + +impl<'conn> SynapseReader<'conn> { + /// Create a new Synapse reader, which entails creating a transaction and + /// locking Synapse tables. + /// + /// # Errors + /// + /// Errors are returned under the following circumstances: + /// + /// - An underlying database error + /// - If we can't lock the Synapse tables (pointing to the fact that Synapse + /// may still be running) + pub async fn new( + synapse_connection: &'conn mut PgConnection, + dry_run: bool, + ) -> Result { + let mut txn = synapse_connection + .begin() + .await + .into_database("begin transaction")?; + + query("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE;") + .execute(&mut *txn) + .await + .into_database("set transaction")?; + + let lock_type = if dry_run { + // We expect dry runs to be done alongside Synapse running, so we don't want to + // interfere with Synapse's database access in that case. + "ACCESS SHARE" + } else { + "EXCLUSIVE" + }; + for table in TABLES_TO_LOCK { + query(&format!("LOCK TABLE {table} IN {lock_type} MODE NOWAIT;")) + .execute(&mut *txn) + .await + .into_database_with(|| format!("locking Synapse table `{table}`"))?; + } + + Ok(Self { txn }) + } + + /// Finishes the Synapse reader, committing the transaction. + /// + /// # Errors + /// + /// Errors are returned under the following circumstances: + /// + /// - An underlying database error whilst committing the transaction. 
+ pub async fn finish(self) -> Result<(), Error> { + self.txn.commit().await.into_database("end transaction")?; + Ok(()) + } + + /// Counts the rows in the Synapse database to get an estimate of how large + /// the migration is going to be. + /// + /// # Errors + /// + /// Errors are returned under the following circumstances: + /// + /// - An underlying database error + pub async fn count_rows(&mut self) -> Result { + // We don't get to filter out application service users by using this estimate, + // which is a shame, but on a large database this is way faster. + // On matrix.org, counting users and devices properly takes around 1m10s, + // which is unnecessary extra downtime during the migration, just to + // show a more accurate progress bar and size a hash map accurately. + let users = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'users'::regclass; + ", + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of users")? + .max(0) + .try_into() + .unwrap_or(usize::MAX); + + let devices = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'devices'::regclass; + ", + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of devices")? + .max(0) + .try_into() + .unwrap_or(usize::MAX); + + let threepids = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'user_threepids'::regclass; + " + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of threepids")? + .max(0) + .try_into() + .unwrap_or(usize::MAX); + + let access_tokens = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'access_tokens'::regclass; + " + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of access tokens")? 
+ .max(0) + .try_into() + .unwrap_or(usize::MAX); + + let refresh_tokens = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'refresh_tokens'::regclass; + " + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of refresh tokens")? + .max(0) + .try_into() + .unwrap_or(usize::MAX); + + let external_ids = sqlx::query_scalar::<_, i64>( + " + SELECT reltuples::bigint AS estimate FROM pg_class WHERE oid = 'user_external_ids'::regclass; + " + ) + .fetch_one(&mut *self.txn) + .await + .into_database("estimating count of external IDs")? + .max(0) + .try_into() + .unwrap_or(usize::MAX); + + Ok(SynapseRowCounts { + users, + devices, + threepids, + external_ids, + access_tokens, + refresh_tokens, + }) + } + + /// Reads Synapse users, excluding application service users (which do not + /// need to be migrated), from the database. + pub fn read_users(&mut self) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + name, password_hash, admin, deactivated, locked, creation_ts, is_guest, appservice_id + FROM users + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse users")) + } + + /// Reads threepids (such as e-mail and phone number associations) from + /// Synapse. + pub fn read_threepids(&mut self) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + user_id, medium, address, added_at + FROM user_threepids + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse threepids")) + } + + /// Read associations between Synapse users and external identity providers + pub fn read_user_external_ids( + &mut self, + ) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + user_id, auth_provider, external_id + FROM user_external_ids + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse user external IDs")) + } + + /// Reads devices from the Synapse database. 
+ /// Does not include so-called 'hidden' devices, which are just a mechanism + /// for storing various signing keys shared between the real devices. + pub fn read_devices(&mut self) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + user_id, device_id, display_name, last_seen, ip, user_agent + FROM devices + WHERE NOT hidden AND device_id != 'guest_device' + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse devices")) + } + + /// Reads unrefreshable access tokens from the Synapse database. + /// This does not include access tokens used for puppetting users, as those + /// are not supported by MAS. + /// + /// This also excludes access tokens whose referenced device ID does not + /// exist, except for deviceless access tokens. + /// (It's unclear what mechanism led to these, but since Synapse has no + /// foreign key constraints and is not consistently atomic about this, + /// it should be no surprise really) + pub fn read_unrefreshable_access_tokens( + &mut self, + ) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + at0.user_id, at0.device_id, at0.token, at0.valid_until_ms, at0.last_validated + FROM access_tokens at0 + INNER JOIN devices USING (user_id, device_id) + WHERE at0.puppets_user_id IS NULL AND at0.refresh_token_id IS NULL + + UNION ALL + + SELECT + at0.user_id, at0.device_id, at0.token, at0.valid_until_ms, at0.last_validated + FROM access_tokens at0 + WHERE at0.puppets_user_id IS NULL AND at0.refresh_token_id IS NULL AND at0.device_id IS NULL + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse access tokens")) + } + + /// Reads (access token, refresh token) pairs from the Synapse database. + /// This does not include token pairs which have been made obsolete + /// by using the refresh token and then acknowledging the + /// successor access token by using it to authenticate a request. 
+ /// + /// The `expiry_ts` and `ultimate_session_expiry_ts` columns are ignored as + /// they are not implemented in MAS. + /// Further, they are unused by any real-world deployment to the best of + /// our knowledge. + pub fn read_refreshable_token_pairs( + &mut self, + ) -> impl Stream> + '_ { + sqlx::query_as( + " + SELECT + rt0.user_id, rt0.device_id, at0.token AS access_token, rt0.token AS refresh_token, at0.valid_until_ms, at0.last_validated + FROM refresh_tokens rt0 + INNER JOIN devices USING (user_id, device_id) + INNER JOIN access_tokens at0 ON at0.refresh_token_id = rt0.id AND at0.user_id = rt0.user_id AND at0.device_id = rt0.device_id + LEFT JOIN access_tokens at1 ON at1.refresh_token_id = rt0.next_token_id + WHERE NOT at1.used OR at1.used IS NULL + ", + ) + .fetch(&mut *self.txn) + .map_err(|err| err.into_database("reading Synapse refresh tokens")) + } +} + +#[cfg(test)] +mod test { + use std::collections::BTreeSet; + + use futures_util::TryStreamExt; + use insta::assert_debug_snapshot; + use sqlx::{PgPool, migrate::Migrator}; + + use crate::{ + SynapseReader, + synapse_reader::{ + SynapseAccessToken, SynapseDevice, SynapseExternalId, SynapseRefreshableTokenPair, + SynapseThreepid, SynapseUser, + }, + }; + + static MIGRATOR: Migrator = sqlx::migrate!("./test_synapse_migrations"); + + #[sqlx::test(migrator = "MIGRATOR", fixtures("user_alice"))] + async fn test_read_users(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let users: BTreeSet = reader + .read_users() + .try_collect() + .await + .expect("failed to read Synapse users"); + + assert_debug_snapshot!(users); + } + + #[sqlx::test(migrator = "MIGRATOR", fixtures("user_alice", "threepids_alice"))] + async fn test_read_threepids(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = 
SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let threepids: BTreeSet = reader + .read_threepids() + .try_collect() + .await + .expect("failed to read Synapse threepids"); + + assert_debug_snapshot!(threepids); + } + + #[sqlx::test(migrator = "MIGRATOR", fixtures("user_alice", "external_ids_alice"))] + async fn test_read_external_ids(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let external_ids: BTreeSet = reader + .read_user_external_ids() + .try_collect() + .await + .expect("failed to read Synapse external user IDs"); + + assert_debug_snapshot!(external_ids); + } + + #[sqlx::test(migrator = "MIGRATOR", fixtures("user_alice", "devices_alice"))] + async fn test_read_devices(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let devices: BTreeSet = reader + .read_devices() + .try_collect() + .await + .expect("failed to read Synapse devices"); + + assert_debug_snapshot!(devices); + } + + #[sqlx::test( + migrator = "MIGRATOR", + fixtures("user_alice", "devices_alice", "access_token_alice") + )] + async fn test_read_access_token(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let access_tokens: BTreeSet = reader + .read_unrefreshable_access_tokens() + .try_collect() + .await + .expect("failed to read Synapse access tokens"); + + assert_debug_snapshot!(access_tokens); + } + + /// Tests that puppetting access tokens are ignored. 
+ #[sqlx::test( + migrator = "MIGRATOR", + fixtures("user_alice", "devices_alice", "access_token_alice_with_puppet") + )] + async fn test_read_access_token_puppet(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let access_tokens: BTreeSet = reader + .read_unrefreshable_access_tokens() + .try_collect() + .await + .expect("failed to read Synapse access tokens"); + + assert!(access_tokens.is_empty()); + } + + #[sqlx::test( + migrator = "MIGRATOR", + fixtures("user_alice", "devices_alice", "access_token_alice_with_refresh_token") + )] + async fn test_read_access_and_refresh_tokens(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let access_tokens: BTreeSet = reader + .read_unrefreshable_access_tokens() + .try_collect() + .await + .expect("failed to read Synapse access tokens"); + + let refresh_tokens: BTreeSet = reader + .read_refreshable_token_pairs() + .try_collect() + .await + .expect("failed to read Synapse refresh tokens"); + + assert!( + access_tokens.is_empty(), + "there are no unrefreshable access tokens" + ); + assert_debug_snapshot!(refresh_tokens); + } + + #[sqlx::test( + migrator = "MIGRATOR", + fixtures( + "user_alice", + "devices_alice", + "access_token_alice_with_unused_refresh_token" + ) + )] + async fn test_read_access_and_unused_refresh_tokens(pool: PgPool) { + let mut conn = pool.acquire().await.expect("failed to get connection"); + let mut reader = SynapseReader::new(&mut conn, false) + .await + .expect("failed to make SynapseReader"); + + let access_tokens: BTreeSet = reader + .read_unrefreshable_access_tokens() + .try_collect() + .await + .expect("failed to read Synapse access tokens"); + + let refresh_tokens: BTreeSet = reader + 
.read_refreshable_token_pairs() + .try_collect() + .await + .expect("failed to read Synapse refresh tokens"); + + assert!( + access_tokens.is_empty(), + "there are no unrefreshable access tokens" + ); + assert_debug_snapshot!(refresh_tokens); + } +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_refresh_tokens.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_refresh_tokens.snap new file mode 100644 index 00000000..fa0ce3a1 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_refresh_tokens.snap @@ -0,0 +1,16 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: refresh_tokens +--- +{ + SynapseRefreshableTokenPair { + user_id: FullUserId( + "@alice:example.com", + ), + device_id: "ADEVICE", + access_token: "syt_AAAAAAAAAAAAAA_AAAA", + refresh_token: "syr_cccccccccccc_cccc", + valid_until_ms: None, + last_validated: None, + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_unused_refresh_tokens.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_unused_refresh_tokens.snap new file mode 100644 index 00000000..cb34a593 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_and_unused_refresh_tokens.snap @@ -0,0 +1,26 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: refresh_tokens +--- +{ + SynapseRefreshableTokenPair { + user_id: FullUserId( + "@alice:example.com", + ), + device_id: "ADEVICE", + access_token: "syt_AAAAAAAAAAAAAA_AAAA", + refresh_token: "syr_cccccccccccc_cccc", + valid_until_ms: None, + last_validated: None, + }, + 
SynapseRefreshableTokenPair { + user_id: FullUserId( + "@alice:example.com", + ), + device_id: "ADEVICE", + access_token: "syt_aaaaaaaaaaaaaa_aaaa", + refresh_token: "syr_bbbbbbbbbbbbb_bbbb", + valid_until_ms: None, + last_validated: None, + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_token.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_token.snap new file mode 100644 index 00000000..038f6bde --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_access_token.snap @@ -0,0 +1,17 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: access_tokens +--- +{ + SynapseAccessToken { + user_id: FullUserId( + "@alice:example.com", + ), + device_id: Some( + "ADEVICE", + ), + token: "syt_aaaaaaaaaaaaaa_aaaa", + valid_until_ms: None, + last_validated: None, + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_devices.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_devices.snap new file mode 100644 index 00000000..a8ca1dd6 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_devices.snap @@ -0,0 +1,26 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: devices +--- +{ + SynapseDevice { + user_id: FullUserId( + "@alice:example.com", + ), + device_id: "ADEVICE", + display_name: Some( + "Matrix Console", + ), + last_seen: Some( + MillisecondsTimestamp( + 2021-06-10T23:00:00Z, + ), + ), + ip: Some( + "203.0.113.1", + ), + user_agent: Some( + "Browser/5.0 (X12; ComputerOS 64; rv:1024.0)", + ), + }, +} diff --git 
a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_external_ids.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_external_ids.snap new file mode 100644 index 00000000..695007d5 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_external_ids.snap @@ -0,0 +1,13 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: external_ids +--- +{ + SynapseExternalId { + user_id: FullUserId( + "@alice:example.com", + ), + auth_provider: "oidc-raasu", + external_id: "871.syn30", + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_threepids.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_threepids.snap new file mode 100644 index 00000000..b8987495 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_threepids.snap @@ -0,0 +1,26 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: threepids +--- +{ + SynapseThreepid { + user_id: FullUserId( + "@alice:example.com", + ), + medium: "email", + address: "alice@example.com", + added_at: MillisecondsTimestamp( + 2019-04-02T18:09:09.014Z, + ), + }, + SynapseThreepid { + user_id: FullUserId( + "@alice:example.com", + ), + medium: "msisdn", + address: "441189998819991197253", + added_at: MillisecondsTimestamp( + 2019-04-14T07:55:49.014Z, + ), + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_users.snap b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_users.snap new file mode 100644 index 00000000..b56f09ab --- /dev/null +++ 
b/matrix-authentication-service/crates/syn2mas/src/synapse_reader/snapshots/syn2mas__synapse_reader__test__read_users.snap @@ -0,0 +1,28 @@ +--- +source: crates/syn2mas/src/synapse_reader/mod.rs +expression: users +--- +{ + SynapseUser { + name: FullUserId( + "@alice:example.com", + ), + password_hash: Some( + "$2b$12$aaa/aaaaaaaaaa.aaaaaaaaaaaaaaa./aaaaaaaaaaaaaaaaaaa/A", + ), + admin: SynapseBool( + false, + ), + deactivated: SynapseBool( + false, + ), + locked: false, + creation_ts: SecondsTimestamp( + 2018-06-30T21:26:02Z, + ), + is_guest: SynapseBool( + false, + ), + appservice_id: None, + }, +} diff --git a/matrix-authentication-service/crates/syn2mas/src/telemetry.rs b/matrix-authentication-service/crates/syn2mas/src/telemetry.rs new file mode 100644 index 00000000..8d67d4bf --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/src/telemetry.rs @@ -0,0 +1,19 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::LazyLock; + +use opentelemetry::{InstrumentationScope, metrics::Meter}; +use opentelemetry_semantic_conventions as semcov; + +static SCOPE: LazyLock = LazyLock::new(|| { + InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(semcov::SCHEMA_URL) + .build() +}); + +pub static METER: LazyLock = + LazyLock::new(|| opentelemetry::global::meter_with_scope(SCOPE.clone())); diff --git a/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250117064958_users.sql b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250117064958_users.sql new file mode 100644 index 00000000..72cc46da --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250117064958_users.sql @@ -0,0 +1,23 @@ +-- Copyright 2025 New Vector Ltd. 
+-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. +-- Brings in the `users` table from Synapse +CREATE TABLE users ( + name text, + password_hash text, + creation_ts bigint, + admin smallint DEFAULT 0 NOT NULL, + upgrade_ts bigint, + is_guest smallint DEFAULT 0 NOT NULL, + appservice_id text, + consent_version text, + consent_server_notice_sent text, + user_type text, + deactivated smallint DEFAULT 0 NOT NULL, + shadow_banned boolean, + consent_ts bigint, + approved boolean, + locked boolean DEFAULT false NOT NULL, + suspended boolean DEFAULT false NOT NULL +); diff --git a/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128141011_threepids.sql b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128141011_threepids.sql new file mode 100644 index 00000000..3ee382b3 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128141011_threepids.sql @@ -0,0 +1,14 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +-- Brings in the `user_threepids` table from Synapse + +CREATE TABLE user_threepids ( + user_id text NOT NULL, + medium text NOT NULL, + address text NOT NULL, + validated_at bigint NOT NULL, + added_at bigint NOT NULL +); diff --git a/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128162513_external_ids.sql b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128162513_external_ids.sql new file mode 100644 index 00000000..9054accd --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128162513_external_ids.sql @@ -0,0 +1,12 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +-- Brings in the `user_external_ids` table from Synapse + +CREATE TABLE user_external_ids ( + auth_provider text NOT NULL, + external_id text NOT NULL, + user_id text NOT NULL +); diff --git a/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128201100_access_and_refresh_tokens.sql b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128201100_access_and_refresh_tokens.sql new file mode 100644 index 00000000..8b1ed58e --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250128201100_access_and_refresh_tokens.sql @@ -0,0 +1,28 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. + +-- Brings in the `access_tokens` and `refresh_tokens` tables from Synapse + +CREATE TABLE access_tokens ( + id bigint NOT NULL, + user_id text NOT NULL, + device_id text, + token text NOT NULL, + valid_until_ms bigint, + puppets_user_id text, + last_validated bigint, + refresh_token_id bigint, + used boolean +); + +CREATE TABLE refresh_tokens ( + id bigint NOT NULL, + user_id text NOT NULL, + device_id text NOT NULL, + token text NOT NULL, + next_token_id bigint, + expiry_ts bigint, + ultimate_session_expiry_ts bigint +); diff --git a/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250129140230_devices.sql b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250129140230_devices.sql new file mode 100644 index 00000000..129df1b6 --- /dev/null +++ b/matrix-authentication-service/crates/syn2mas/test_synapse_migrations/20250129140230_devices.sql @@ -0,0 +1,15 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only +-- Please see LICENSE files in the repository root for full details. 
+ +-- Brings in the `devices` table from Synapse +CREATE TABLE devices ( + user_id text NOT NULL, + device_id text NOT NULL, + display_name text, + last_seen bigint, + ip text, + user_agent text, + hidden boolean DEFAULT false +); diff --git a/matrix-authentication-service/crates/tasks/Cargo.toml b/matrix-authentication-service/crates/tasks/Cargo.toml new file mode 100644 index 00000000..5c2a3153 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/Cargo.toml @@ -0,0 +1,46 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[package] +name = "mas-tasks" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +chrono.workspace = true +cron.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +rand_chacha.workspace = true +rand.workspace = true +serde_json.workspace = true +serde.workspace = true +sqlx.workspace = true +thiserror.workspace = true +tokio-util.workspace = true +tokio.workspace = true +tracing-opentelemetry.workspace = true +tracing.workspace = true +ulid.workspace = true + +mas-context.workspace = true +mas-data-model.workspace = true +mas-email.workspace = true +mas-i18n.workspace = true +mas-matrix.workspace = true +mas-router.workspace = true +mas-storage-pg.workspace = true +mas-storage.workspace = true +mas-templates.workspace = true diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/misc.rs b/matrix-authentication-service/crates/tasks/src/cleanup/misc.rs new file mode 100644 index 00000000..52fd62e5 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/misc.rs @@ -0,0 +1,88 @@ +// Copyright 
2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Miscellaneous cleanup tasks + +use std::time::Duration; + +use async_trait::async_trait; +use mas_storage::queue::{CleanupQueueJobsJob, PruneStalePolicyDataJob}; +use tracing::{debug, info}; +use ulid::Ulid; + +use super::BATCH_SIZE; +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for CleanupQueueJobsJob { + #[tracing::instrument(name = "job.cleanup_queue_jobs", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove completed and failed queue jobs after 30 days. + // Keep them for debugging purposes. + let until = state.clock.now() - chrono::Duration::days(30); + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let (count, cursor) = repo + .queue_job() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no queue jobs to clean up"); + } else { + info!(count = total, "cleaned up queue jobs"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for PruneStalePolicyDataJob { + #[tracing::instrument(name = "job.prune_stale_policy_data", skip_all)] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // Keep the last 10 policy data + let count = repo + .policy_data() + .prune(10) + 
.await + .map_err(JobError::retry)?; + + repo.save().await.map_err(JobError::retry)?; + + if count == 0 { + debug!("no stale policy data to prune"); + } else { + info!(count, "pruned stale policy data"); + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/mod.rs b/matrix-authentication-service/crates/tasks/src/cleanup/mod.rs new file mode 100644 index 00000000..02ace1de --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/mod.rs @@ -0,0 +1,24 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! Database cleanup tasks +//! +//! This module contains tasks for cleaning up old data from the database. +//! Tasks are grouped by domain: +//! +//! - [`tokens`]: OAuth token cleanup (access and refresh tokens) +//! - [`sessions`]: Session cleanup (compat, `OAuth2`, user sessions and their +//! IPs) +//! - [`oauth`]: OAuth grants and upstream OAuth cleanup +//! - [`user`]: User-related cleanup (registrations, recovery, email auth) +//! - [`misc`]: Miscellaneous cleanup (queue jobs, policy data) + +mod misc; +mod oauth; +mod sessions; +mod tokens; +mod user; + +pub(crate) const BATCH_SIZE: usize = 1000; diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/oauth.rs b/matrix-authentication-service/crates/tasks/src/cleanup/oauth.rs new file mode 100644 index 00000000..2a201d4d --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/oauth.rs @@ -0,0 +1,216 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
OAuth grants and upstream OAuth cleanup tasks + +use std::time::Duration; + +use async_trait::async_trait; +use mas_storage::queue::{ + CleanupOAuthAuthorizationGrantsJob, CleanupOAuthDeviceCodeGrantsJob, + CleanupUpstreamOAuthLinksJob, CleanupUpstreamOAuthSessionsJob, +}; +use tracing::{debug, info}; +use ulid::Ulid; + +use super::BATCH_SIZE; +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for CleanupOAuthAuthorizationGrantsJob { + #[tracing::instrument(name = "job.cleanup_oauth_authorization_grants", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove authorization grants after 7 days. They are in practice only + // valid for a short time, but keeping them around helps investigate abuse + // patterns. + let until = state.clock.now() - chrono::Duration::days(7); + // We use the fact that ULIDs include the creation time in their first 48 bits + // as a cursor + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + // This returns the number of deleted grants, and the greatest ULID processed + let (count, cursor) = repo + .oauth2_authorization_grant() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no authorization grants to clean up"); + } else { + info!(count = total, "cleaned up authorization grants"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupOAuthDeviceCodeGrantsJob { + #[tracing::instrument(name = "job.cleanup_oauth_device_code_grants", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove device code grants after 7 days. They are in practice only + // valid for a short time, but keeping them around helps investigate abuse + // patterns. + let until = state.clock.now() - chrono::Duration::days(7); + // We use the fact that ULIDs include the creation time in their first 48 bits + // as a cursor + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + // This returns the number of deleted grants, and the greatest ULID processed + let (count, cursor) = repo + .oauth2_device_code_grant() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no device code grants to clean up"); + } else { + info!(count = total, "cleaned up device code grants"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupUpstreamOAuthSessionsJob { + #[tracing::instrument(name = "job.cleanup_upstream_oauth_sessions", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove pending upstream OAuth authorization sessions after 7 days. + let until = state.clock.now() - chrono::Duration::days(7); + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let (count, cursor) = repo + .upstream_oauth_session() + .cleanup_orphaned(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no pending upstream OAuth sessions to clean up"); + } else { + info!(count = total, "cleaned up pending upstream OAuth sessions"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupUpstreamOAuthLinksJob { + #[tracing::instrument(name = "job.cleanup_upstream_oauth_links", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove orphaned upstream OAuth links after 7 days. 
+ let until = state.clock.now() - chrono::Duration::days(7); + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let (count, cursor) = repo + .upstream_oauth_link() + .cleanup_orphaned(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no orphaned upstream OAuth links to clean up"); + } else { + info!(count = total, "cleaned up orphaned upstream OAuth links"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/sessions.rs b/matrix-authentication-service/crates/tasks/src/cleanup/sessions.rs new file mode 100644 index 00000000..0a11a6b9 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/sessions.rs @@ -0,0 +1,290 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Session cleanup tasks + +use std::time::Duration; + +use async_trait::async_trait; +use mas_storage::queue::{ + CleanupFinishedCompatSessionsJob, CleanupFinishedOAuth2SessionsJob, + CleanupFinishedUserSessionsJob, CleanupInactiveCompatSessionIpsJob, + CleanupInactiveOAuth2SessionIpsJob, CleanupInactiveUserSessionIpsJob, +}; +use tracing::{debug, info}; + +use super::BATCH_SIZE; +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for CleanupFinishedCompatSessionsJob { + #[tracing::instrument(name = "job.cleanup_finished_compat_sessions", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup compat sessions that were finished more than 30 days ago + let until = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted sessions, and the last finished_at + // timestamp + let (count, last_finished_at) = repo + .compat_session() + .cleanup_finished(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_finished_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no finished compat sessions to clean up"); + } else { + info!(count = total, "cleaned up finished compat sessions"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupFinishedOAuth2SessionsJob { + #[tracing::instrument(name = "job.cleanup_finished_oauth2_sessions", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup OAuth2 sessions that were finished more than 30 days ago + let until = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted sessions, and the last finished_at + // timestamp + let (count, last_finished_at) = repo + .oauth2_session() + .cleanup_finished(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_finished_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no finished OAuth2 sessions to clean up"); + } else { + info!(count = total, "cleaned up finished OAuth2 sessions"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupFinishedUserSessionsJob { + #[tracing::instrument(name = "job.cleanup_finished_user_sessions", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup user/browser sessions that were finished more than 30 days ago + let until = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted sessions, and the last finished_at + // timestamp. Only deletes sessions that have no child sessions + // (compat_sessions or oauth2_sessions). + let (count, last_finished_at) = repo + .browser_session() + .cleanup_finished(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_finished_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no finished user sessions to clean up"); + } else { + info!(count = total, "cleaned up finished user sessions"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupInactiveOAuth2SessionIpsJob { + #[tracing::instrument(name = "job.cleanup_inactive_oauth2_session_ips", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Clear IPs from sessions inactive for 30+ days + let threshold = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let (count, last_active_at) = repo + .oauth2_session() + .cleanup_inactive_ips(since, threshold, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_active_at; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no OAuth2 session IPs to clean up"); + } else { + info!(count = total, "cleaned up inactive OAuth2 session IPs"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupInactiveCompatSessionIpsJob { + #[tracing::instrument(name = "job.cleanup_inactive_compat_session_ips", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Clear IPs from sessions inactive for 30+ days + let threshold = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = 
state.repository().await.map_err(JobError::retry)?; + + let (count, last_active_at) = repo + .compat_session() + .cleanup_inactive_ips(since, threshold, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_active_at; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no compat session IPs to clean up"); + } else { + info!(count = total, "cleaned up inactive compat session IPs"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupInactiveUserSessionIpsJob { + #[tracing::instrument(name = "job.cleanup_inactive_user_session_ips", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Clear IPs from sessions inactive for 30+ days + let threshold = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let (count, last_active_at) = repo + .browser_session() + .cleanup_inactive_ips(since, threshold, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_active_at; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no user session IPs to clean up"); + } else { + info!(count = total, "cleaned up inactive user session IPs"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + Some(Duration::from_secs(10 * 60)) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/tokens.rs b/matrix-authentication-service/crates/tasks/src/cleanup/tokens.rs new file mode 100644 index 00000000..dd91de2b --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/tokens.rs @@ -0,0 +1,214 @@ +// Copyright 2026 Element Creations Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! OAuth token cleanup tasks + +use std::time::Duration; + +use async_trait::async_trait; +use mas_storage::queue::{ + CleanupConsumedOAuthRefreshTokensJob, CleanupExpiredOAuthAccessTokensJob, + CleanupRevokedOAuthAccessTokensJob, CleanupRevokedOAuthRefreshTokensJob, +}; +use tracing::{debug, info}; + +use super::BATCH_SIZE; +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for CleanupRevokedOAuthAccessTokensJob { + #[tracing::instrument(name = "job.cleanup_revoked_oauth_access_tokens", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup tokens that were revoked more than an hour ago + let until = state.clock.now() - chrono::Duration::hours(1); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted tokens, and the last revoked_at timestamp + let (count, last_revoked_at) = repo + .oauth2_access_token() + .cleanup_revoked(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_revoked_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no token to clean up"); + } else { + info!(count = total, "cleaned up revoked tokens"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupExpiredOAuthAccessTokensJob { + #[tracing::instrument(name = "job.cleanup_expired_oauth_access_tokens", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup tokens that expired more than a month ago + // It is important to keep them around for a bit because of refresh + // token idempotency. When we see a refresh token twice, we allow + // reusing it *only* if both the next refresh token and the next access + // tokens were not used. By keeping expired access tokens around for a + // month, we cannot make the *correct* decision, we will assume that the + // token wasn't used. Refer to the token refresh logic for details. + let until = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted tokens, and the last expires_at timestamp + let (count, last_expires_at) = repo + .oauth2_access_token() + .cleanup_expired(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_expires_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no token to clean up"); + } else { + info!(count = total, "cleaned up expired tokens"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + Some(Duration::from_secs(60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupRevokedOAuthRefreshTokensJob { + #[tracing::instrument(name = "job.cleanup_revoked_oauth_refresh_tokens", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup tokens that were revoked more than an hour ago + let until = state.clock.now() - chrono::Duration::hours(1); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted tokens, and the last revoked_at timestamp + let (count, last_revoked_at) = repo + .oauth2_refresh_token() + .cleanup_revoked(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_revoked_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no token to clean up"); + } else { + info!(count = total, "cleaned up revoked tokens"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupConsumedOAuthRefreshTokensJob { + #[tracing::instrument(name = "job.cleanup_consumed_oauth_refresh_tokens", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup tokens that were consumed more than an hour ago + let until = state.clock.now() - chrono::Duration::hours(1); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + // This returns the number of deleted tokens, and the last consumed_at timestamp + let (count, last_consumed_at) = repo + .oauth2_refresh_token() + .cleanup_consumed(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_consumed_at; + total += count; + + // Check how many we deleted. 
If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no token to clean up"); + } else { + info!(count = total, "cleaned up consumed tokens"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/cleanup/user.rs b/matrix-authentication-service/crates/tasks/src/cleanup/user.rs new file mode 100644 index 00000000..d682c1b5 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/cleanup/user.rs @@ -0,0 +1,181 @@ +// Copyright 2026 Element Creations Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! User-related cleanup tasks + +use std::time::Duration; + +use async_trait::async_trait; +use mas_storage::queue::{ + CleanupUserEmailAuthenticationsJob, CleanupUserRecoverySessionsJob, CleanupUserRegistrationsJob, +}; +use tracing::{debug, info}; +use ulid::Ulid; + +use super::BATCH_SIZE; +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for CleanupUserRegistrationsJob { + #[tracing::instrument(name = "job.cleanup_user_registrations", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove user registrations after 30 days. They are in practice only + // valid for 1h, but keeping them around helps investigate abuse patterns. + let until = state.clock.now() - chrono::Duration::days(30); + // We use the fact that ULIDs include the creation time in their first 48 bits + // as a cursor + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + // Run until we get cancelled. 
We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + // This returns the number of deleted registrations, and the greatest ULID + // processed + let (count, cursor) = repo + .user_registration() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + // Check how many we deleted. If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no user registrations to clean up"); + } else { + info!(count = total, "cleaned up user registrations"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupUserRecoverySessionsJob { + #[tracing::instrument(name = "job.cleanup_user_recovery_sessions", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove recovery sessions after 7 days. They are in practice only + // valid for a short time (tickets expire after 10 minutes), but keeping + // them around helps investigate abuse patterns. + let until = state.clock.now() - chrono::Duration::days(7); + // We use the fact that ULIDs include the creation time in their first 48 bits + // as a cursor + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. 
+ let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + // This returns the number of deleted sessions, and the greatest ULID processed + let (count, cursor) = repo + .user_recovery() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + // Check how many we deleted. If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no user recovery sessions to clean up"); + } else { + info!(count = total, "cleaned up user recovery sessions"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} + +#[async_trait] +impl RunnableJob for CleanupUserEmailAuthenticationsJob { + #[tracing::instrument(name = "job.cleanup_user_email_authentications", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Remove email authentications after 7 days. They are in practice only + // valid for a short time (codes expire after 10 minutes), but keeping + // them around helps investigate abuse patterns. + let until = state.clock.now() - chrono::Duration::days(7); + // We use the fact that ULIDs include the creation time in their first 48 bits + // as a cursor + let until = Ulid::from_parts( + u64::try_from(until.timestamp_millis()).unwrap_or(u64::MIN), + u128::MAX, + ); + let mut total = 0; + + // Run until we get cancelled. We don't schedule a retry if we get cancelled, as + // this is a scheduled job and it will end up being rescheduled later anyway. 
+ let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + // This returns the number of deleted authentications, and the greatest ULID + // processed + let (count, cursor) = repo + .user_email() + .cleanup_authentications(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + since = cursor; + total += count; + + // Check how many we deleted. If we deleted exactly BATCH_SIZE, + // there might be more to delete + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no user email authentications to clean up"); + } else { + info!(count = total, "cleaned up user email authentications"); + } + + Ok(()) + } + + fn timeout(&self) -> Option { + // This job runs every hour, so having it running it for 10 minutes is fine + Some(Duration::from_secs(10 * 60)) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/email.rs b/matrix-authentication-service/crates/tasks/src/email.rs new file mode 100644 index 00000000..8e685843 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/email.rs @@ -0,0 +1,135 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use async_trait::async_trait; +use chrono::Duration; +use mas_email::{Address, EmailVerificationContext, Mailbox}; +use mas_storage::queue::{SendEmailAuthenticationCodeJob, VerifyEmailJob}; +use mas_templates::TemplateContext as _; +use rand::{Rng, distributions::Uniform}; +use tracing::info; + +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for VerifyEmailJob { + #[tracing::instrument( + name = "job.verify_email", + fields(user_email.id = %self.user_email_id()), + skip_all, + )] + async fn run(&self, _state: &State, _context: JobContext) -> Result<(), JobError> { + // This job was for the old email verification flow, which has been replaced. + // We still want to consume existing jobs in the queue, so we just make them + // permanently fail. + Err(JobError::fail(anyhow::anyhow!("Not implemented"))) + } +} + +#[async_trait] +impl RunnableJob for SendEmailAuthenticationCodeJob { + #[tracing::instrument( + name = "job.send_email_authentication_code", + fields(user_email_authentication.id = %self.user_email_authentication_id()), + skip_all, + )] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let clock = state.clock(); + let mailer = state.mailer(); + let mut rng = state.rng(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let user_email_authentication = repo + .user_email() + .lookup_authentication(self.user_email_authentication_id()) + .await + .map_err(JobError::retry)? 
+            .ok_or(JobError::fail(anyhow::anyhow!(
+                "User email authentication not found"
+            )))?;
+
+        if user_email_authentication.completed_at.is_some() {
+            return Err(JobError::fail(anyhow::anyhow!(
+                "User email authentication already completed"
+            )));
+        }
+
+        // Load the browser session, if any
+        let browser_session =
+            if let Some(browser_session) = user_email_authentication.user_session_id {
+                Some(
+                    repo.browser_session()
+                        .lookup(browser_session)
+                        .await
+                        .map_err(JobError::retry)?
+                        .ok_or(JobError::fail(anyhow::anyhow!(
+                            "Failed to load browser session"
+                        )))?,
+                )
+            } else {
+                None
+            };
+
+        // Load the registration, if any
+        let registration =
+            if let Some(registration_id) = user_email_authentication.user_registration_id {
+                Some(
+                    repo.user_registration()
+                        .lookup(registration_id)
+                        .await
+                        .map_err(JobError::retry)?
+                        .ok_or(JobError::fail(anyhow::anyhow!(
+                            "Failed to load user registration"
+                        )))?,
+                )
+            } else {
+                None
+            };
+
+        // Generate a new 6-digit authentication code
+        let range = Uniform::<u32>::from(0..1_000_000);
+        let code = rng.sample(range);
+        let code = format!("{code:06}");
+        let code = repo
+            .user_email()
+            .add_authentication_code(
+                &mut rng,
+                clock,
+                Duration::minutes(5), // TODO: make this configurable
+                &user_email_authentication,
+                code,
+            )
+            .await
+            .map_err(JobError::retry)?;
+
+        let address: Address = user_email_authentication
+            .email
+            .parse()
+            .map_err(JobError::fail)?;
+        let username_from_session = browser_session.as_ref().map(|s| s.user.username.clone());
+        let username_from_registration = registration.as_ref().map(|r| r.username.clone());
+        let username = username_from_registration.or(username_from_session);
+        let mailbox = Mailbox::new(username, address);
+
+        info!("Sending email verification code to {}", mailbox);
+
+        let language = self.language().parse().map_err(JobError::fail)?;
+
+        let context = EmailVerificationContext::new(code, browser_session, registration)
+            .with_language(language);
+        mailer
+            .send_verification_email(mailbox, &context)
+            .await
+            .map_err(JobError::fail)?;
+
+        repo.save().await.map_err(JobError::fail)?;
+
+        Ok(())
+    }
+}
diff --git a/matrix-authentication-service/crates/tasks/src/lib.rs b/matrix-authentication-service/crates/tasks/src/lib.rs
new file mode 100644
index 00000000..33748ae8
--- /dev/null
+++ b/matrix-authentication-service/crates/tasks/src/lib.rs
@@ -0,0 +1,322 @@
+// Copyright 2025, 2026 Element Creations Ltd.
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2021-2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use std::sync::{Arc, LazyLock};
+
+use mas_data_model::{Clock, SiteConfig};
+use mas_email::Mailer;
+use mas_matrix::HomeserverConnection;
+use mas_router::UrlBuilder;
+use mas_storage::{BoxRepository, RepositoryError, RepositoryFactory};
+use mas_storage_pg::PgRepositoryFactory;
+use new_queue::QueueRunnerError;
+use opentelemetry::metrics::Meter;
+use rand::SeedableRng;
+use sqlx::{Pool, Postgres};
+use tokio_util::{sync::CancellationToken, task::TaskTracker};
+
+pub use crate::new_queue::QueueWorker;
+
+mod cleanup;
+mod email;
+mod matrix;
+mod new_queue;
+mod recovery;
+mod sessions;
+mod user;
+
+static METER: LazyLock<Meter> = LazyLock::new(|| {
+    let scope = opentelemetry::InstrumentationScope::builder(env!("CARGO_PKG_NAME"))
+        .with_version(env!("CARGO_PKG_VERSION"))
+        .with_schema_url(opentelemetry_semantic_conventions::SCHEMA_URL)
+        .build();
+
+    opentelemetry::global::meter_with_scope(scope)
+});
+
+#[derive(Clone)]
+struct State {
+    repository_factory: PgRepositoryFactory,
+    mailer: Mailer,
+    clock: Arc<dyn Clock>,
+    homeserver: Arc<dyn HomeserverConnection>,
+    url_builder: UrlBuilder,
+    site_config: SiteConfig,
+}
+
+impl State {
+    pub fn new(
+        repository_factory: PgRepositoryFactory,
+        clock: impl Clock + 'static,
+        mailer: Mailer,
+        homeserver: impl HomeserverConnection + 'static,
+        url_builder: UrlBuilder,
+        site_config: SiteConfig,
+    ) -> Self {
+        Self {
+            repository_factory,
+            mailer,
+            clock: Arc::new(clock),
+            homeserver: Arc::new(homeserver),
+            url_builder,
+            site_config,
+        }
+    }
+
+    pub fn pool(&self) -> Pool<Postgres> {
+        self.repository_factory.pool()
+    }
+
+    pub fn clock(&self) -> &dyn Clock {
+        &self.clock
+    }
+
+    pub fn mailer(&self) -> &Mailer {
+        &self.mailer
+    }
+
+    // This is fine for now, we may move that to a trait at some point.
+    #[allow(clippy::unused_self, clippy::disallowed_methods)]
+    pub fn rng(&self) -> rand_chacha::ChaChaRng {
+        rand_chacha::ChaChaRng::from_rng(rand::thread_rng()).expect("failed to seed rng")
+    }
+
+    pub async fn repository(&self) -> Result<BoxRepository, RepositoryError> {
+        self.repository_factory.create().await
+    }
+
+    pub fn matrix_connection(&self) -> &dyn HomeserverConnection {
+        self.homeserver.as_ref()
+    }
+
+    pub fn url_builder(&self) -> &UrlBuilder {
+        &self.url_builder
+    }
+
+    pub fn site_config(&self) -> &SiteConfig {
+        &self.site_config
+    }
+}
+
+/// Initialise the worker, without running it.
+///
+/// This is mostly useful for tests.
+///
+/// # Errors
+///
+/// This function can fail if the database connection fails.
+pub async fn init( + repository_factory: PgRepositoryFactory, + clock: impl Clock + 'static, + mailer: &Mailer, + homeserver: impl HomeserverConnection + 'static, + url_builder: UrlBuilder, + site_config: &SiteConfig, + cancellation_token: CancellationToken, +) -> Result { + let state = State::new( + repository_factory, + clock, + mailer.clone(), + homeserver, + url_builder, + site_config.clone(), + ); + let mut worker = QueueWorker::new(state, cancellation_token).await?; + + worker + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_handler::() + .register_deprecated_queue("cleanup-expired-tokens") + // Recurring jobs are spread across the hour at ~5 minute intervals + // to avoid clustering and distribute database load evenly. 
+ .add_schedule( + "cleanup-revoked-oauth-access-tokens", + // Run this job every hour at minute 0 + "0 0 * * * *".parse()?, + mas_storage::queue::CleanupRevokedOAuthAccessTokensJob, + ) + .add_schedule( + "cleanup-revoked-oauth-refresh-tokens", + // Run this job every hour at minute 5 + "0 5 * * * *".parse()?, + mas_storage::queue::CleanupRevokedOAuthRefreshTokensJob, + ) + .add_schedule( + "cleanup-consumed-oauth-refresh-tokens", + // Run this job every hour at minute 5 (safe to parallelize with revoked) + "0 5 * * * *".parse()?, + mas_storage::queue::CleanupConsumedOAuthRefreshTokensJob, + ) + .add_schedule( + "cleanup-finished-compat-sessions", + // Run this job every hour at minute 10 + "0 10 * * * *".parse()?, + mas_storage::queue::CleanupFinishedCompatSessionsJob, + ) + .add_schedule( + "cleanup-finished-oauth2-sessions", + // Run this job every hour at minute 15 + "0 15 * * * *".parse()?, + mas_storage::queue::CleanupFinishedOAuth2SessionsJob, + ) + .add_schedule( + "cleanup-finished-user-sessions", + // Run this job every hour at minute 20 + "0 20 * * * *".parse()?, + mas_storage::queue::CleanupFinishedUserSessionsJob, + ) + .add_schedule( + "cleanup-inactive-oauth2-session-ips", + // Run this job every hour at minute 25 + "0 25 * * * *".parse()?, + mas_storage::queue::CleanupInactiveOAuth2SessionIpsJob, + ) + .add_schedule( + "cleanup-inactive-compat-session-ips", + // Run this job every hour at minute 25 + "0 25 * * * *".parse()?, + mas_storage::queue::CleanupInactiveCompatSessionIpsJob, + ) + .add_schedule( + "cleanup-inactive-user-session-ips", + // Run this job every hour at minute 25 + "0 25 * * * *".parse()?, + mas_storage::queue::CleanupInactiveUserSessionIpsJob, + ) + .add_schedule( + "cleanup-oauth-authorization-grants", + // Run this job every hour at minute 30 + "0 30 * * * *".parse()?, + mas_storage::queue::CleanupOAuthAuthorizationGrantsJob, + ) + .add_schedule( + "cleanup-oauth-device-code-grants", + // Run this job every hour at minute 35 + 
"0 35 * * * *".parse()?, + mas_storage::queue::CleanupOAuthDeviceCodeGrantsJob, + ) + .add_schedule( + "cleanup-upstream-oauth-sessions", + // Run this job every hour at minute 40 (independent, safe to parallelize) + "0 40 * * * *".parse()?, + mas_storage::queue::CleanupUpstreamOAuthSessionsJob, + ) + .add_schedule( + "cleanup-upstream-oauth-links", + // Run this job every hour at minute 40 + "0 40 * * * *".parse()?, + mas_storage::queue::CleanupUpstreamOAuthLinksJob, + ) + // User cleanup jobs (minutes 45, 50) + .add_schedule( + "cleanup-user-registrations", + // Run this job every hour at minute 45 + "0 45 * * * *".parse()?, + mas_storage::queue::CleanupUserRegistrationsJob, + ) + .add_schedule( + "cleanup-user-recovery-sessions", + // Run this job every hour at minute 50 + "0 50 * * * *".parse()?, + mas_storage::queue::CleanupUserRecoverySessionsJob, + ) + .add_schedule( + "cleanup-user-email-authentications", + // Run this job every hour at minute 50 + "0 50 * * * *".parse()?, + mas_storage::queue::CleanupUserEmailAuthenticationsJob, + ) + .add_schedule( + "cleanup-queue-jobs", + // Run this job every hour at minute 55 + "0 55 * * * *".parse()?, + mas_storage::queue::CleanupQueueJobsJob, + ) + .add_schedule( + "cleanup-expired-oauth-access-tokens", + // Run this job every 4 hours at minute 5 + "0 5 */4 * * *".parse()?, + mas_storage::queue::CleanupExpiredOAuthAccessTokensJob, + ) + .add_schedule( + "expire-inactive-sessions", + // Run this job every 15 minutes at second 30 + "30 */15 * * * *".parse()?, + mas_storage::queue::ExpireInactiveSessionsJob, + ) + .add_schedule( + "prune-stale-policy-data", + // Run once a day at 2:00 AM + "0 0 2 * * *".parse()?, + mas_storage::queue::PruneStalePolicyDataJob, + ); + + Ok(worker) +} + +/// Initialise the worker and run it. +/// +/// # Errors +/// +/// This function can fail if the database connection fails. 
+#[expect(clippy::too_many_arguments, reason = "this is fine")]
+pub async fn init_and_run(
+    repository_factory: PgRepositoryFactory,
+    clock: impl Clock + 'static,
+    mailer: &Mailer,
+    homeserver: impl HomeserverConnection + 'static,
+    url_builder: UrlBuilder,
+    site_config: &SiteConfig,
+    cancellation_token: CancellationToken,
+    task_tracker: &TaskTracker,
+) -> Result<(), QueueRunnerError> {
+    let worker = init(
+        repository_factory,
+        clock,
+        mailer,
+        homeserver,
+        url_builder,
+        site_config,
+        cancellation_token,
+    )
+    .await?;
+
+    task_tracker.spawn(worker.run());
+
+    Ok(())
+}
diff --git a/matrix-authentication-service/crates/tasks/src/matrix.rs b/matrix-authentication-service/crates/tasks/src/matrix.rs
new file mode 100644
index 00000000..68905fe5
--- /dev/null
+++ b/matrix-authentication-service/crates/tasks/src/matrix.rs
@@ -0,0 +1,294 @@
+// Copyright 2024, 2025 New Vector Ltd.
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C.
+//
+// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
+// Please see LICENSE files in the repository root for full details.
+
+use std::collections::HashSet;
+
+use anyhow::Context;
+use async_trait::async_trait;
+use mas_data_model::Device;
+use mas_matrix::ProvisionRequest;
+use mas_storage::{
+    Pagination, RepositoryAccess,
+    compat::CompatSessionFilter,
+    oauth2::OAuth2SessionFilter,
+    personal::PersonalSessionFilter,
+    queue::{
+        DeleteDeviceJob, ProvisionDeviceJob, ProvisionUserJob, QueueJobRepositoryExt as _,
+        SyncDevicesJob,
+    },
+    user::{UserEmailRepository, UserRepository},
+};
+use tracing::info;
+
+use crate::{
+    State,
+    new_queue::{JobContext, JobError, RunnableJob},
+};
+
+/// Job to provision a user on the Matrix homeserver.
+/// This works by doing a PUT request to the
+/// `/_synapse/admin/v2/users/{user_id}` endpoint.
+#[async_trait]
+impl RunnableJob for ProvisionUserJob {
+    #[tracing::instrument(
+        name = "job.provision_user",
+        fields(user.id = %self.user_id()),
+        skip_all,
+    )]
+    async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> {
+        let matrix = state.matrix_connection();
+        let mut repo = state.repository().await.map_err(JobError::retry)?;
+        let mut rng = state.rng();
+        let clock = state.clock();
+
+        let user = repo
+            .user()
+            .lookup(self.user_id())
+            .await
+            .map_err(JobError::retry)?
+            .context("User not found")
+            .map_err(JobError::fail)?;
+
+        let emails = repo
+            .user_email()
+            .all(&user)
+            .await
+            .map_err(JobError::retry)?
+            .into_iter()
+            .map(|email| email.email)
+            .collect();
+        let mut request =
+            ProvisionRequest::new(user.username.clone(), user.sub.clone()).set_emails(emails);
+
+        if let Some(display_name) = self.display_name_to_set() {
+            request = request.set_displayname(display_name.to_owned());
+        }
+
+        let created = matrix
+            .provision_user(&request)
+            .await
+            .map_err(JobError::retry)?;
+
+        let mxid = matrix.mxid(&user.username);
+        if created {
+            info!(%user.id, %mxid, "User created");
+        } else {
+            info!(%user.id, %mxid, "User updated");
+        }
+
+        // Schedule a device sync job
+        let sync_device_job = SyncDevicesJob::new(&user);
+        repo.queue_job()
+            .schedule_job(&mut rng, clock, sync_device_job)
+            .await
+            .map_err(JobError::retry)?;
+
+        repo.save().await.map_err(JobError::retry)?;
+
+        Ok(())
+    }
+}
+
+/// Job to provision a device on the Matrix homeserver.
+///
+/// This job is deprecated and therefore just schedules a [`SyncDevicesJob`]
+#[async_trait]
+impl RunnableJob for ProvisionDeviceJob {
+    #[tracing::instrument(
+        name = "job.provision_device",
+        fields(
+            user.id = %self.user_id(),
+            device.id = %self.device_id(),
+        ),
+        skip_all,
+    )]
+    async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> {
+        let mut repo = state.repository().await.map_err(JobError::retry)?;
+        let mut rng = state.rng();
+        let clock = state.clock();
+
+        let user = repo
+            .user()
+            .lookup(self.user_id())
+            .await
+            .map_err(JobError::retry)?
+            .context("User not found")
+            .map_err(JobError::fail)?;
+
+        // Schedule a device sync job
+        repo.queue_job()
+            .schedule_job(&mut rng, clock, SyncDevicesJob::new(&user))
+            .await
+            .map_err(JobError::retry)?;
+
+        // Commit the transaction, otherwise the scheduled job is lost
+        repo.save().await.map_err(JobError::retry)?;
+
+        Ok(())
+    }
+}
+
+/// Job to delete a device from a user's account.
+///
+/// This job is deprecated and therefore just schedules a [`SyncDevicesJob`]
+#[async_trait]
+impl RunnableJob for DeleteDeviceJob {
+    #[tracing::instrument(
+        name = "job.delete_device",
+        fields(
+            user.id = %self.user_id(),
+            device.id = %self.device_id(),
+        ),
+        skip_all,
+    )]
+    async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> {
+        let mut rng = state.rng();
+        let clock = state.clock();
+        let mut repo = state.repository().await.map_err(JobError::retry)?;
+
+        let user = repo
+            .user()
+            .lookup(self.user_id())
+            .await
+            .map_err(JobError::retry)?
+            .context("User not found")
+            .map_err(JobError::fail)?;
+
+        // Schedule a device sync job
+        repo.queue_job()
+            .schedule_job(&mut rng, clock, SyncDevicesJob::new(&user))
+            .await
+            .map_err(JobError::retry)?;
+
+        // Commit the transaction, otherwise the scheduled job is lost
+        repo.save().await.map_err(JobError::retry)?;
+
+        Ok(())
+    }
+}
+
+/// Job to sync the list of devices of a user with the homeserver.
+#[async_trait] +impl RunnableJob for SyncDevicesJob { + #[tracing::instrument( + name = "job.sync_devices", + fields(user.id = %self.user_id()), + skip_all, + )] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let matrix = state.matrix_connection(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let user = repo + .user() + .lookup(self.user_id()) + .await + .map_err(JobError::retry)? + .context("User not found") + .map_err(JobError::fail)?; + + // Lock the user sync to make sure we don't get into a race condition + repo.user() + .acquire_lock_for_sync(&user) + .await + .map_err(JobError::retry)?; + + let mut devices = HashSet::new(); + + // Cycle through all the compat sessions of the user, and grab the devices + let mut cursor = Pagination::first(5000); + loop { + let page = repo + .compat_session() + .list( + CompatSessionFilter::new().for_user(&user).active_only(), + cursor, + ) + .await + .map_err(JobError::retry)?; + + for edge in page.edges { + let (compat_session, _) = edge.node; + if let Some(ref device) = compat_session.device { + devices.insert(device.as_str().to_owned()); + } + cursor = cursor.after(edge.cursor); + } + + if !page.has_next_page { + break; + } + } + + // Cycle though all the oauth2 sessions of the user, and grab the devices + let mut cursor = Pagination::first(5000); + loop { + let page = repo + .oauth2_session() + .list( + OAuth2SessionFilter::new().for_user(&user).active_only(), + cursor, + ) + .await + .map_err(JobError::retry)?; + + for edge in page.edges { + for scope in &*edge.node.scope { + if let Some(device) = Device::from_scope_token(scope) { + devices.insert(device.as_str().to_owned()); + } + } + + cursor = cursor.after(edge.cursor); + } + + if !page.has_next_page { + break; + } + } + + // Cycle through all the personal sessions of the user and get the devices + let mut cursor = Pagination::first(5000); + loop { + let page = repo + .personal_session() + .list( + 
PersonalSessionFilter::new() + .for_actor_user(&user) + .active_only(), + cursor, + ) + .await + .map_err(JobError::retry)?; + + for edge in page.edges { + let (session, _) = &edge.node; + for scope in &*session.scope { + if let Some(device) = Device::from_scope_token(scope) { + devices.insert(device.as_str().to_owned()); + } + } + + cursor = cursor.after(edge.cursor); + } + + if !page.has_next_page { + break; + } + } + + matrix + .sync_devices(&user.username, devices) + .await + .map_err(JobError::retry)?; + + // We kept the connection until now, so that we still hold the lock on the user + // throughout the sync + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/new_queue.rs b/matrix-authentication-service/crates/tasks/src/new_queue.rs new file mode 100644 index 00000000..e7f3a9a9 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/new_queue.rs @@ -0,0 +1,1211 @@ +// Copyright 2025, 2026 Element Creations Ltd. +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+
+use std::{collections::HashMap, sync::Arc};
+
+use async_trait::async_trait;
+use chrono::{DateTime, Duration, Utc};
+use cron::Schedule;
+use mas_context::LogContext;
+use mas_data_model::Clock;
+use mas_storage::{
+    RepositoryAccess, RepositoryError,
+    queue::{InsertableJob, Job, JobMetadata, Worker},
+};
+use mas_storage_pg::{DatabaseError, PgRepository};
+use opentelemetry::{
+    KeyValue,
+    metrics::{Counter, Histogram, UpDownCounter},
+};
+use rand::{Rng, RngCore, distributions::Uniform};
+use serde::de::DeserializeOwned;
+use sqlx::{
+    Acquire, Either,
+    postgres::{PgAdvisoryLock, PgListener},
+};
+use thiserror::Error;
+use tokio::{task::JoinSet, time::Instant};
+use tokio_util::sync::CancellationToken;
+use tracing::{Instrument as _, Span};
+use tracing_opentelemetry::OpenTelemetrySpanExt as _;
+use ulid::Ulid;
+
+use crate::{METER, State};
+
+type JobPayload = serde_json::Value;
+
+#[derive(Clone)]
+pub struct JobContext {
+    pub id: Ulid,
+    pub metadata: JobMetadata,
+    pub queue_name: String,
+    pub attempt: usize,
+    pub start: Instant,
+    pub cancellation_token: CancellationToken,
+}
+
+impl JobContext {
+    pub fn span(&self) -> Span {
+        let span = tracing::info_span!(
+            parent: Span::none(),
+            "job.run",
+            job.id = %self.id,
+            job.queue.name = self.queue_name,
+            job.attempt = self.attempt,
+        );
+
+        span.add_link(self.metadata.span_context());
+
+        span
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum JobErrorDecision {
+    Retry,
+
+    #[default]
+    Fail,
+}
+
+impl std::fmt::Display for JobErrorDecision {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Retry => f.write_str("retry"),
+            Self::Fail => f.write_str("fail"),
+        }
+    }
+}
+
+#[derive(Debug, Error)]
+#[error("Job failed to run, will {decision}")]
+pub struct JobError {
+    decision: JobErrorDecision,
+    #[source]
+    error: anyhow::Error,
+}
+
+impl JobError {
+    pub fn retry<T: Into<anyhow::Error>>(error: T) -> Self {
+        Self {
+            decision: JobErrorDecision::Retry,
+            error: error.into(),
+        }
+    }
+
+    pub fn fail<T: Into<anyhow::Error>>(error: T) -> Self {
+        Self {
+            decision: JobErrorDecision::Fail,
+            error: error.into(),
+        }
+    }
+}
+
+pub trait FromJob {
+    fn from_job(payload: JobPayload) -> Result<Self, anyhow::Error>
+    where
+        Self: Sized;
+}
+
+impl<T> FromJob for T
+where
+    T: DeserializeOwned,
+{
+    fn from_job(payload: JobPayload) -> Result<Self, anyhow::Error> {
+        serde_json::from_value(payload).map_err(Into::into)
+    }
+}
+
+#[async_trait]
+pub trait RunnableJob: Send + 'static {
+    async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError>;
+
+    /// Allows the job to set a timeout for its execution. Jobs should then look
+    /// at the cancellation token passed in the [`JobContext`] to handle
+    /// graceful shutdowns.
+    fn timeout(&self) -> Option<Duration> {
+        None
+    }
+}
+
+fn box_runnable_job<T: RunnableJob + 'static>(job: T) -> Box<dyn RunnableJob> {
+    Box::new(job)
+}
+
+#[derive(Debug, Error)]
+pub enum QueueRunnerError {
+    #[error("Failed to setup listener")]
+    SetupListener(#[source] sqlx::Error),
+
+    #[error("Failed to start transaction")]
+    StartTransaction(#[source] sqlx::Error),
+
+    #[error("Failed to commit transaction")]
+    CommitTransaction(#[source] sqlx::Error),
+
+    #[error("Failed to acquire leader lock")]
+    LeaderLock(#[source] sqlx::Error),
+
+    #[error(transparent)]
+    Repository(#[from] RepositoryError),
+
+    #[error(transparent)]
+    Database(#[from] DatabaseError),
+
+    #[error("Invalid schedule expression")]
+    InvalidSchedule(#[from] cron::error::Error),
+
+    #[error("Worker is not the leader")]
+    NotLeader,
+}
+
+// When the worker waits for a notification, we still want to wake it up every
+// second. Because we don't want all the workers to wake up at the same time, we
+// add a random jitter to the sleep duration, so they effectively sleep between
+// 0.9 and 1.1 seconds.
+const MIN_SLEEP_DURATION: std::time::Duration = std::time::Duration::from_millis(900);
+const MAX_SLEEP_DURATION: std::time::Duration = std::time::Duration::from_millis(1100);
+
+// How many jobs can we run concurrently
+const MAX_CONCURRENT_JOBS: usize = 10;
+
+// How many jobs can we fetch at once
+const MAX_JOBS_TO_FETCH: usize = 5;
+
+// How many attempts a job should be retried
+const MAX_ATTEMPTS: usize = 10;
+
+/// Returns the delay to wait before retrying a job
+///
+/// Uses an exponential backoff: 5s, 10s, 20s, 40s, 1m20s, 2m40s, 5m20s, 10m40s,
+/// 21m20s, 42m40s
+fn retry_delay(attempt: usize) -> Duration {
+    let attempt = u32::try_from(attempt).unwrap_or(u32::MAX);
+    Duration::milliseconds(2_i64.saturating_pow(attempt) * 5_000)
+}
+
+type JobResult = (std::time::Duration, Result<(), JobError>);
+type JobFactory = Arc<dyn Fn(JobPayload) -> Box<dyn RunnableJob> + Send + Sync>;
+
+/// This is a fake job we use to consume jobs from deprecated queues
+struct DeprecatedJob;
+
+#[async_trait]
+impl RunnableJob for DeprecatedJob {
+    async fn run(&self, _state: &State, context: JobContext) -> Result<(), JobError> {
+        tracing::warn!(
+            job.id = %context.id,
+            job.queue.name = context.queue_name,
+            "Consumed a job from a deprecated queue, which can happen after version upgrades. This did nothing other than removing the job from the queue."
+        );
+
+        Ok(())
+    }
+}
+
+struct ScheduleDefinition {
+    schedule_name: &'static str,
+    expression: Schedule,
+    queue_name: &'static str,
+    payload: serde_json::Value,
+}
+
+pub struct QueueWorker {
+    listener: PgListener,
+    registration: Worker,
+    am_i_leader: bool,
+    last_heartbeat: DateTime<Utc>,
+    cancellation_token: CancellationToken,
+    #[expect(dead_code, reason = "This is used on Drop")]
+    cancellation_guard: tokio_util::sync::DropGuard,
+    state: State,
+    schedules: Vec<ScheduleDefinition>,
+    tracker: JobTracker,
+    wakeup_reason: Counter<u64>,
+    tick_time: Histogram<u64>,
+}
+
+impl QueueWorker {
+    #[tracing::instrument(
+        name = "worker.init",
+        skip_all,
+        fields(worker.id)
+    )]
+    pub(crate) async fn new(
+        state: State,
+        cancellation_token: CancellationToken,
+    ) -> Result<Self, QueueRunnerError> {
+        let mut rng = state.rng();
+        let clock = state.clock();
+
+        let mut listener = PgListener::connect_with(&state.pool())
+            .await
+            .map_err(QueueRunnerError::SetupListener)?;
+
+        // We get notifications of leader stepping down on this channel
+        listener
+            .listen("queue_leader_stepdown")
+            .await
+            .map_err(QueueRunnerError::SetupListener)?;
+
+        // We get notifications when a job is available on this channel
+        listener
+            .listen("queue_available")
+            .await
+            .map_err(QueueRunnerError::SetupListener)?;
+
+        let txn = listener
+            .begin()
+            .await
+            .map_err(QueueRunnerError::StartTransaction)?;
+        let mut repo = PgRepository::from_conn(txn);
+
+        let registration = repo.queue_worker().register(&mut rng, clock).await?;
+        tracing::Span::current().record("worker.id", tracing::field::display(registration.id));
+        repo.into_inner()
+            .commit()
+            .await
+            .map_err(QueueRunnerError::CommitTransaction)?;
+
+        tracing::info!(worker.id = %registration.id, "Registered worker");
+        let now = clock.now();
+
+        let wakeup_reason = METER
+            .u64_counter("job.worker.wakeups")
+            .with_description("Counts how many time the worker has been woken up, for which reason")
+            .build();
+
+        // Pre-create the reasons on the counter
+        wakeup_reason.add(0, &[KeyValue::new("reason", "sleep")]);
+        wakeup_reason.add(0, &[KeyValue::new("reason", "task")]);
+        wakeup_reason.add(0, &[KeyValue::new("reason", "notification")]);
+
+        let tick_time = METER
+            .u64_histogram("job.worker.tick_duration")
+            .with_description(
+                "How much time the worker took to tick, including performing leader duties",
+            )
+            .build();
+
+        // We put a cancellation drop guard in the structure, so that when it gets
+        // dropped, we're sure to cancel the token
+        let cancellation_guard = cancellation_token.clone().drop_guard();
+
+        Ok(Self {
+            listener,
+            registration,
+            am_i_leader: false,
+            last_heartbeat: now,
+            cancellation_token,
+            cancellation_guard,
+            state,
+            schedules: Vec::new(),
+            tracker: JobTracker::new(),
+            wakeup_reason,
+            tick_time,
+        })
+    }
+
+    // NOTE(review): the generic bounds were stripped in this patch; reconstructed
+    // from the body's use of T::from_job and T::QUEUE_NAME — confirm against upstream.
+    pub(crate) fn register_handler<T: RunnableJob + InsertableJob + FromJob + Send + Sync + 'static>(
+        &mut self,
+    ) -> &mut Self {
+        // There is a potential panic here, which is fine as it's going to be caught
+        // within the job task
+        let factory = |payload: JobPayload| {
+            box_runnable_job(T::from_job(payload).expect("Failed to deserialize job"))
+        };
+
+        self.tracker
+            .factories
+            .insert(T::QUEUE_NAME, Arc::new(factory));
+        self
+    }
+
+    /// Register a queue name as deprecated, which will consume leftover jobs
+    pub(crate) fn register_deprecated_queue(&mut self, queue_name: &'static str) -> &mut Self {
+        let factory = |_payload: JobPayload| box_runnable_job(DeprecatedJob);
+        self.tracker.factories.insert(queue_name, Arc::new(factory));
+        self
+    }
+
+    pub(crate) fn add_schedule<T: InsertableJob>(
+        &mut self,
+        schedule_name: &'static str,
+        expression: Schedule,
+        job: T,
+    ) -> &mut Self {
+        let payload = serde_json::to_value(job).expect("failed to serialize job payload");
+
+        self.schedules.push(ScheduleDefinition {
+            schedule_name,
+            expression,
+            queue_name: T::QUEUE_NAME,
+            payload,
+        });
+
+        self
+    }
+
+    pub(crate) async fn run(mut self) {
+        if let Err(e) = self.run_inner().await {
+            tracing::error!(
+                error = &e as &dyn std::error::Error,
+                "Failed to run new queue"
); + } + } + + async fn run_inner(&mut self) -> Result<(), QueueRunnerError> { + self.setup_schedules().await?; + + while !self.cancellation_token.is_cancelled() { + LogContext::new("worker-run-loop") + .run(|| self.run_loop()) + .await?; + } + + self.shutdown().await?; + + Ok(()) + } + + #[tracing::instrument(name = "worker.setup_schedules", skip_all)] + pub(crate) async fn setup_schedules(&mut self) -> Result<(), QueueRunnerError> { + let schedules: Vec<_> = self.schedules.iter().map(|s| s.schedule_name).collect(); + + // Start a transaction on the existing PgListener connection + let txn = self + .listener + .begin() + .await + .map_err(QueueRunnerError::StartTransaction)?; + + let mut repo = PgRepository::from_conn(txn); + + // Setup the entries in the queue_schedules table + repo.queue_schedule().setup(&schedules).await?; + + repo.into_inner() + .commit() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + + Ok(()) + } + + #[tracing::instrument(name = "worker.run_loop", skip_all)] + async fn run_loop(&mut self) -> Result<(), QueueRunnerError> { + self.wait_until_wakeup().await?; + + if self.cancellation_token.is_cancelled() { + return Ok(()); + } + + let start = Instant::now(); + self.tick().await?; + + if self.am_i_leader { + self.perform_leader_duties().await?; + } + + let elapsed = start.elapsed().as_millis().try_into().unwrap_or(u64::MAX); + self.tick_time.record(elapsed, &[]); + + Ok(()) + } + + #[tracing::instrument(name = "worker.shutdown", skip_all)] + async fn shutdown(&mut self) -> Result<(), QueueRunnerError> { + tracing::info!("Shutting down worker"); + + let clock = self.state.clock(); + let mut rng = self.state.rng(); + + // Start a transaction on the existing PgListener connection + let txn = self + .listener + .begin() + .await + .map_err(QueueRunnerError::StartTransaction)?; + + let mut repo = PgRepository::from_conn(txn); + + // Log about any job still running + match self.tracker.running_jobs() { + 0 => {} + 1 => 
tracing::warn!("There is one job still running, waiting for it to finish"), + n => tracing::warn!("There are {n} jobs still running, waiting for them to finish"), + } + + // TODO: we may want to introduce a timeout here, and abort the tasks if they + // take too long. It's fine for now, as we don't have long-running + // tasks, most of them are idempotent, and the only effect might be that + // the worker would 'dirtily' shutdown, meaning that its tasks would be + // considered, later retried by another worker + + // Wait for all the jobs to finish + self.tracker + .process_jobs(&mut rng, clock, &mut repo, true) + .await?; + + // Tell the other workers we're shutting down + // This also releases the leader election lease + repo.queue_worker() + .shutdown(clock, &self.registration) + .await?; + + repo.into_inner() + .commit() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + + Ok(()) + } + + #[tracing::instrument(name = "worker.wait_until_wakeup", skip_all)] + async fn wait_until_wakeup(&mut self) -> Result<(), QueueRunnerError> { + let mut rng = self.state.rng(); + + // This is to make sure we wake up every second to do the maintenance tasks + // We add a little bit of random jitter to the duration, so that we don't get + // fully synced workers waking up at the same time after each notification + let sleep_duration = rng.sample(Uniform::new(MIN_SLEEP_DURATION, MAX_SLEEP_DURATION)); + let wakeup_sleep = tokio::time::sleep(sleep_duration); + + tokio::select! 
{ + () = self.cancellation_token.cancelled() => { + tracing::debug!("Woke up from cancellation"); + }, + + () = wakeup_sleep => { + tracing::debug!("Woke up from sleep"); + self.wakeup_reason.add(1, &[KeyValue::new("reason", "sleep")]); + }, + + () = self.tracker.collect_next_job(), if self.tracker.has_jobs() => { + tracing::debug!("Joined job task"); + self.wakeup_reason.add(1, &[KeyValue::new("reason", "task")]); + }, + + notification = self.listener.recv() => { + self.wakeup_reason.add(1, &[KeyValue::new("reason", "notification")]); + match notification { + Ok(notification) => { + tracing::debug!( + notification.channel = notification.channel(), + notification.payload = notification.payload(), + "Woke up from notification" + ); + }, + Err(e) => { + tracing::error!(error = &e as &dyn std::error::Error, "Failed to receive notification"); + }, + } + }, + } + + Ok(()) + } + + #[tracing::instrument( + name = "worker.tick", + skip_all, + fields(worker.id = %self.registration.id), + )] + async fn tick(&mut self) -> Result<(), QueueRunnerError> { + tracing::debug!("Tick"); + let clock = self.state.clock(); + let mut rng = self.state.rng(); + let now = clock.now(); + + // Start a transaction on the existing PgListener connection + let txn = self + .listener + .begin() + .await + .map_err(QueueRunnerError::StartTransaction)?; + let mut repo = PgRepository::from_conn(txn); + + // We send a heartbeat every minute, to avoid writing to the database too often + // on a logged table + if now - self.last_heartbeat >= chrono::Duration::minutes(1) { + tracing::info!("Sending heartbeat"); + repo.queue_worker() + .heartbeat(clock, &self.registration) + .await?; + self.last_heartbeat = now; + } + + // Remove any dead worker leader leases + repo.queue_worker() + .remove_leader_lease_if_expired(clock) + .await?; + + // Try to become (or stay) the leader + let leader = repo + .queue_worker() + .try_get_leader_lease(clock, &self.registration) + .await?; + + // Process any job task which 
finished + self.tracker + .process_jobs(&mut rng, clock, &mut repo, false) + .await?; + + // Compute how many jobs we should fetch at most + let max_jobs_to_fetch = MAX_CONCURRENT_JOBS + .saturating_sub(self.tracker.running_jobs()) + .max(MAX_JOBS_TO_FETCH); + + if max_jobs_to_fetch == 0 { + tracing::warn!("Internal job queue is full, not fetching any new jobs"); + } else { + // Grab a few jobs in the queue + let queues = self.tracker.queues(); + let jobs = repo + .queue_job() + .reserve(clock, &self.registration, &queues, max_jobs_to_fetch) + .await?; + + for Job { + id, + queue_name, + payload, + metadata, + attempt, + } in jobs + { + let cancellation_token = self.cancellation_token.child_token(); + let start = Instant::now(); + let context = JobContext { + id, + metadata, + queue_name, + attempt, + start, + cancellation_token, + }; + + self.tracker.spawn_job(self.state.clone(), context, payload); + } + } + + // After this point, we are locking the leader table, so it's important that we + // commit as soon as possible to not block the other workers for too long + repo.into_inner() + .commit() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + + // Save the new leader state to log any change + if leader != self.am_i_leader { + // If we flipped state, log it + self.am_i_leader = leader; + if self.am_i_leader { + tracing::info!("I'm the leader now"); + } else { + tracing::warn!("I am no longer the leader"); + } + } + + Ok(()) + } + + #[tracing::instrument(name = "worker.perform_leader_duties", skip_all)] + async fn perform_leader_duties(&mut self) -> Result<(), QueueRunnerError> { + // This should have been checked by the caller, but better safe than sorry + if !self.am_i_leader { + return Err(QueueRunnerError::NotLeader); + } + + let clock = self.state.clock(); + let mut rng = self.state.rng(); + + // Start a transaction on the existing PgListener connection + let txn = self + .listener + .begin() + .await + .map_err(QueueRunnerError::StartTransaction)?; 
+ + // The thing with the leader election is that it locks the table during the + // election, preventing other workers from going through the loop. + // + // Ideally, we would do the leader duties in the same transaction so that we + // make sure only one worker is doing the leader duties, but that + // would mean we would lock all the workers for the duration of the + // duties, which is not ideal. + // + // So we do the duties in a separate transaction, in which we take an advisory + // lock, so that in the very rare case where two workers think they are the + // leader, we still don't have two workers doing the duties at the same time. + let lock = PgAdvisoryLock::new("leader-duties"); + + let locked = lock + .try_acquire(txn) + .await + .map_err(QueueRunnerError::LeaderLock)?; + + let locked = match locked { + Either::Left(locked) => locked, + Either::Right(txn) => { + tracing::error!("Another worker has the leader lock, aborting"); + txn.rollback() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + return Ok(()); + } + }; + + let mut repo = PgRepository::from_conn(locked); + + // Look at the state of schedules in the database + let schedules_status = repo.queue_schedule().list().await?; + + let now = clock.now(); + for schedule in &self.schedules { + // Find the schedule status from the database + let Some(status) = schedules_status + .iter() + .find(|s| s.schedule_name == schedule.schedule_name) + else { + tracing::error!( + "Schedule {} was not found in the database", + schedule.schedule_name + ); + continue; + }; + + // Figure out if we should schedule a new job + if let Some(next_time) = status.last_scheduled_at { + if next_time > now { + // We already have a job scheduled in the future, skip + continue; + } + + if status.last_scheduled_job_completed == Some(false) { + // The last scheduled job has not completed yet, skip + continue; + } + } + + let next_tick = schedule.expression.after(&now).next().unwrap(); + + tracing::info!( + "Scheduling 
job for {}, next run at {}", + schedule.schedule_name, + next_tick + ); + + repo.queue_job() + .schedule_later( + &mut rng, + clock, + schedule.queue_name, + schedule.payload.clone(), + serde_json::json!({}), + next_tick, + Some(schedule.schedule_name), + ) + .await?; + } + + // We also check if the worker is dead, and if so, we shutdown all the dead + // workers that haven't checked in the last two minutes + repo.queue_worker() + .shutdown_dead_workers(clock, Duration::minutes(2)) + .await?; + + // TODO: mark tasks those workers had as lost + + // Mark all the scheduled jobs as available + let scheduled = repo.queue_job().schedule_available_jobs(clock).await?; + match scheduled { + 0 => {} + 1 => tracing::info!("One scheduled job marked as available"), + n => tracing::info!("{n} scheduled jobs marked as available"), + } + + // Release the leader lock + let txn = repo + .into_inner() + .release_now() + .await + .map_err(QueueRunnerError::LeaderLock)?; + + txn.commit() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + + Ok(()) + } + + /// Process all the pending jobs in the queue. + /// This should only be called in tests! + /// + /// # Errors + /// + /// This function can fail if the database connection fails. + pub async fn process_all_jobs_in_tests(&mut self) -> Result<(), QueueRunnerError> { + // In case we haven't setup the schedules yet + self.setup_schedules().await?; + + // I swear, I'm the leader! + self.am_i_leader = true; + + // First, perform the leader duties. This will make sure that we schedule + // recurring jobs. 
+ self.perform_leader_duties().await?; + + let clock = self.state.clock(); + let mut rng = self.state.rng(); + + // Grab the connection from the PgListener + let txn = self + .listener + .begin() + .await + .map_err(QueueRunnerError::StartTransaction)?; + let mut repo = PgRepository::from_conn(txn); + + // Spawn all the jobs in the database + let queues = self.tracker.queues(); + let jobs = repo + .queue_job() + // I really hope that we don't spawn more than 10k jobs in tests + .reserve(clock, &self.registration, &queues, 10_000) + .await?; + + for Job { + id, + queue_name, + payload, + metadata, + attempt, + } in jobs + { + let cancellation_token = self.cancellation_token.child_token(); + let start = Instant::now(); + let context = JobContext { + id, + metadata, + queue_name, + attempt, + start, + cancellation_token, + }; + + self.tracker.spawn_job(self.state.clone(), context, payload); + } + + self.tracker + .process_jobs(&mut rng, clock, &mut repo, true) + .await?; + + repo.into_inner() + .commit() + .await + .map_err(QueueRunnerError::CommitTransaction)?; + + Ok(()) + } +} + +/// Tracks running jobs +/// +/// This is a separate structure to be able to borrow it mutably at the same +/// time as the connection to the database is borrowed +struct JobTracker { + /// Stores a mapping from the job queue name to the job factory + factories: HashMap<&'static str, JobFactory>, + + /// A join set of all the currently running jobs + running_jobs: JoinSet, + + /// Stores a mapping from the Tokio task ID to the job context + job_contexts: HashMap, + + /// Stores the last `join_next_with_id` result for processing, in case we + /// got woken up in `collect_next_job` + last_join_result: Option>, + + /// An histogram which records the time it takes to process a job + job_processing_time: Histogram, + + /// A counter which records the number of jobs currently in flight + in_flight_jobs: UpDownCounter, +} + +impl JobTracker { + fn new() -> Self { + let job_processing_time = METER 
+ .u64_histogram("job.process.duration") + .with_description("The time it takes to process a job in milliseconds") + .with_unit("ms") + .build(); + + let in_flight_jobs = METER + .i64_up_down_counter("job.active_tasks") + .with_description("The number of jobs currently in flight") + .with_unit("{job}") + .build(); + + Self { + factories: HashMap::new(), + running_jobs: JoinSet::new(), + job_contexts: HashMap::new(), + last_join_result: None, + job_processing_time, + in_flight_jobs, + } + } + + /// Returns the queue names that are currently being tracked + fn queues(&self) -> Vec<&'static str> { + self.factories.keys().copied().collect() + } + + /// Spawn a job on the job tracker + fn spawn_job(&mut self, state: State, context: JobContext, payload: JobPayload) { + let factory = self.factories.get(context.queue_name.as_str()).cloned(); + let task = { + let log_context = LogContext::new(format!("job-{}", context.queue_name)); + let context = context.clone(); + let span = context.span(); + log_context + .run(async move || { + // We should never crash, but in case we do, we do that in the task and + // don't crash the worker + let job = factory.expect("unknown job factory")(payload); + + let timeout = job.timeout(); + // If there is a timeout set on the job, spawn a task which will cancel the + // CancellationToken once the timeout is reached + if let Some(timeout) = timeout { + let context = context.clone(); + + // It's fine to spawn this task without tracking it, as it is quite + // lightweight and has no reason to crash. 
+ tokio::spawn( + context + .cancellation_token + .clone() + // This makes sure the task gets cancelled as soon as the job + // finishes + .run_until_cancelled_owned(async move { + tokio::time::sleep(timeout).await; + tracing::warn!( + job.id = %context.id, + job.queue.name = %context.queue_name, + "Job reached timeout, asking for cancellation" + ); + context.cancellation_token.cancel(); + }), + ); + } + + tracing::info!( + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + job.timeout = timeout.map(tracing::field::debug), + "Running job" + ); + let result = job.run(&state, context.clone()).await; + + // Cancel the cancellation token to stop any timeout task + // that may be running + context.cancellation_token.cancel(); + + let Some(context_stats) = + LogContext::maybe_with(mas_context::LogContext::stats) + else { + // This should never happen, but if it does it's fine: we're recovering fine + // from panics in those tasks + panic!("Missing log context, this should never happen"); + }; + + // We log the result here so that it's attached to the right span & log context + match &result { + Ok(()) => { + tracing::info!( + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + "Job completed [{context_stats}]" + ); + } + + Err(JobError { + decision: JobErrorDecision::Fail, + error, + }) => { + tracing::error!( + error = &**error as &dyn std::error::Error, + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + "Job failed, not retrying [{context_stats}]" + ); + } + + Err(JobError { + decision: JobErrorDecision::Retry, + error, + }) if context.attempt < MAX_ATTEMPTS => { + let delay = retry_delay(context.attempt); + tracing::warn!( + error = &**error as &dyn std::error::Error, + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + "Job failed, will retry in {}s [{context_stats}]", + 
delay.num_seconds() + ); + } + + Err(JobError { + decision: JobErrorDecision::Retry, + error, + }) => { + tracing::error!( + error = &**error as &dyn std::error::Error, + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + "Job failed too many times, abandonning [{context_stats}]" + ); + } + } + + (context_stats.elapsed, result) + }) + .instrument(span) + }; + + self.in_flight_jobs.add( + 1, + &[KeyValue::new("job.queue.name", context.queue_name.clone())], + ); + + let handle = self.running_jobs.spawn(task); + self.job_contexts.insert(handle.id(), context); + } + + /// Returns `true` if there are currently running jobs + fn has_jobs(&self) -> bool { + !self.running_jobs.is_empty() + } + + /// Returns the number of currently running jobs + /// + /// This also includes the job result which may be stored for processing + fn running_jobs(&self) -> usize { + self.running_jobs.len() + usize::from(self.last_join_result.is_some()) + } + + async fn collect_next_job(&mut self) { + // Double-check that we don't have a job result stored + if self.last_join_result.is_some() { + tracing::error!( + "Job tracker already had a job result stored, this should never happen!" + ); + return; + } + + self.last_join_result = self.running_jobs.join_next_with_id().await; + } + + /// Process all the jobs which are currently running + /// + /// If `blocking` is `true`, this function will block until all the jobs + /// are finished. Otherwise, it will return as soon as it processed the + /// already finished jobs. 
+ async fn process_jobs( + &mut self, + rng: &mut (dyn RngCore + Send), + clock: &dyn Clock, + repo: &mut dyn RepositoryAccess, + blocking: bool, + ) -> Result<(), E> { + if self.last_join_result.is_none() { + if blocking { + self.last_join_result = self.running_jobs.join_next_with_id().await; + } else { + self.last_join_result = self.running_jobs.try_join_next_with_id(); + } + } + + while let Some(result) = self.last_join_result.take() { + match result { + // The job succeeded. The logging and time measurement is already done in the task + Ok((id, (elapsed, Ok(())))) => { + let context = self + .job_contexts + .remove(&id) + .expect("Job context not found"); + + self.in_flight_jobs.add( + -1, + &[KeyValue::new("job.queue.name", context.queue_name.clone())], + ); + + let elapsed_ms = elapsed.as_millis().try_into().unwrap_or(u64::MAX); + self.job_processing_time.record( + elapsed_ms, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "success"), + ], + ); + + repo.queue_job() + .mark_as_completed(clock, context.id) + .await?; + } + + // The job failed. 
The logging and time measurement is already done in the task + Ok((id, (elapsed, Err(e)))) => { + let context = self + .job_contexts + .remove(&id) + .expect("Job context not found"); + + self.in_flight_jobs.add( + -1, + &[KeyValue::new("job.queue.name", context.queue_name.clone())], + ); + + let reason = format!("{:?}", e.error); + repo.queue_job() + .mark_as_failed(clock, context.id, &reason) + .await?; + + let elapsed_ms = elapsed.as_millis().try_into().unwrap_or(u64::MAX); + match e.decision { + JobErrorDecision::Fail => { + self.job_processing_time.record( + elapsed_ms, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "failed"), + KeyValue::new("job.decision", "fail"), + ], + ); + } + + JobErrorDecision::Retry if context.attempt < MAX_ATTEMPTS => { + self.job_processing_time.record( + elapsed_ms, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "failed"), + KeyValue::new("job.decision", "retry"), + ], + ); + + let delay = retry_delay(context.attempt); + repo.queue_job() + .retry(&mut *rng, clock, context.id, delay) + .await?; + } + + JobErrorDecision::Retry => { + self.job_processing_time.record( + elapsed_ms, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "failed"), + KeyValue::new("job.decision", "abandon"), + ], + ); + } + } + } + + // The job crashed (or was aborted) + Err(e) => { + let id = e.id(); + let context = self + .job_contexts + .remove(&id) + .expect("Job context not found"); + + self.in_flight_jobs.add( + -1, + &[KeyValue::new("job.queue.name", context.queue_name.clone())], + ); + + // This measurement is not accurate as it includes the time processing the jobs, + // but it's fine, it's only for panicked tasks + let elapsed = context + .start + .elapsed() + .as_millis() + .try_into() + .unwrap_or(u64::MAX); + + let reason = e.to_string(); + repo.queue_job() + .mark_as_failed(clock, context.id, &reason) + .await?; + + if 
context.attempt < MAX_ATTEMPTS { + let delay = retry_delay(context.attempt); + tracing::error!( + error = &e as &dyn std::error::Error, + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + job.elapsed = format!("{elapsed}ms"), + "Job crashed, will retry in {}s", + delay.num_seconds() + ); + + self.job_processing_time.record( + elapsed, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "crashed"), + KeyValue::new("job.decision", "retry"), + ], + ); + + repo.queue_job() + .retry(&mut *rng, clock, context.id, delay) + .await?; + } else { + tracing::error!( + error = &e as &dyn std::error::Error, + job.id = %context.id, + job.queue.name = %context.queue_name, + job.attempt = %context.attempt, + job.elapsed = format!("{elapsed}ms"), + "Job crashed too many times, abandonning" + ); + + self.job_processing_time.record( + elapsed, + &[ + KeyValue::new("job.queue.name", context.queue_name), + KeyValue::new("job.result", "crashed"), + KeyValue::new("job.decision", "abandon"), + ], + ); + } + } + } + + if blocking { + self.last_join_result = self.running_jobs.join_next_with_id().await; + } else { + self.last_join_result = self.running_jobs.try_join_next_with_id(); + } + } + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/recovery.rs b/matrix-authentication-service/crates/tasks/src/recovery.rs new file mode 100644 index 00000000..51afcc29 --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/recovery.rs @@ -0,0 +1,119 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use anyhow::Context; +use async_trait::async_trait; +use mas_email::{Address, Mailbox}; +use mas_i18n::DataLocale; +use mas_storage::{ + Pagination, RepositoryAccess, + queue::SendAccountRecoveryEmailsJob, + user::{UserEmailFilter, UserRecoveryRepository}, +}; +use mas_templates::{EmailRecoveryContext, TemplateContext}; +use rand::distributions::{Alphanumeric, DistString}; +use tracing::{error, info}; + +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +/// Job to send account recovery emails for a given recovery session. +#[async_trait] +impl RunnableJob for SendAccountRecoveryEmailsJob { + #[tracing::instrument( + name = "job.send_account_recovery_email", + fields( + user_recovery_session.id = %self.user_recovery_session_id(), + user_recovery_session.email, + ), + skip_all, + )] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let clock = state.clock(); + let mailer = state.mailer(); + let url_builder = state.url_builder(); + let mut rng = state.rng(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let session = repo + .user_recovery() + .lookup_session(self.user_recovery_session_id()) + .await + .map_err(JobError::retry)? 
+ .context("User recovery session not found") + .map_err(JobError::fail)?; + + tracing::Span::current().record("user_recovery_session.email", &session.email); + + if session.consumed_at.is_some() { + info!("Recovery session already consumed, not sending email"); + return Ok(()); + } + + let mut cursor = Pagination::first(50); + + let lang: DataLocale = session + .locale + .parse() + .context("Invalid locale in database on recovery session") + .map_err(JobError::fail)?; + + loop { + let page = repo + .user_email() + .list(UserEmailFilter::new().for_email(&session.email), cursor) + .await + .map_err(JobError::retry)?; + + for edge in page.edges { + let ticket = Alphanumeric.sample_string(&mut rng, 32); + + let ticket = repo + .user_recovery() + .add_ticket(&mut rng, clock, &session, &edge.node, ticket) + .await + .map_err(JobError::retry)?; + + let user = repo + .user() + .lookup(edge.node.user_id) + .await + .map_err(JobError::retry)? + .context("User not found") + .map_err(JobError::fail)?; + + let url = url_builder.account_recovery_link(ticket.ticket); + + let address: Address = edge.node.email.parse().map_err(JobError::fail)?; + let mailbox = Mailbox::new(Some(user.username.clone()), address); + + info!("Sending recovery email to {}", mailbox); + let context = EmailRecoveryContext::new(user, session.clone(), url) + .with_language(lang.clone()); + + // XXX: we only log if the email fails to send, to avoid stopping the loop + if let Err(e) = mailer.send_recovery_email(mailbox, &context).await { + error!( + error = &e as &dyn std::error::Error, + "Failed to send recovery email" + ); + } + + cursor = cursor.after(edge.cursor); + } + + if !page.has_next_page { + break; + } + } + + repo.save().await.map_err(JobError::fail)?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/sessions.rs b/matrix-authentication-service/crates/tasks/src/sessions.rs new file mode 100644 index 00000000..eede69d5 --- /dev/null +++ 
b/matrix-authentication-service/crates/tasks/src/sessions.rs @@ -0,0 +1,242 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashSet; + +use async_trait::async_trait; +use chrono::Duration; +use mas_storage::{ + compat::CompatSessionFilter, + oauth2::OAuth2SessionFilter, + queue::{ + ExpireInactiveCompatSessionsJob, ExpireInactiveOAuthSessionsJob, ExpireInactiveSessionsJob, + ExpireInactiveUserSessionsJob, QueueJobRepositoryExt, SyncDevicesJob, + }, + user::BrowserSessionFilter, +}; + +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +#[async_trait] +impl RunnableJob for ExpireInactiveSessionsJob { + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let Some(config) = state.site_config().session_expiration.as_ref() else { + // Automatic session expiration is disabled + return Ok(()); + }; + + let clock = state.clock(); + let mut rng = state.rng(); + let now = clock.now(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + if let Some(ttl) = config.oauth_session_inactivity_ttl { + repo.queue_job() + .schedule_job( + &mut rng, + clock, + ExpireInactiveOAuthSessionsJob::new(now - ttl), + ) + .await + .map_err(JobError::retry)?; + } + + if let Some(ttl) = config.compat_session_inactivity_ttl { + repo.queue_job() + .schedule_job( + &mut rng, + clock, + ExpireInactiveCompatSessionsJob::new(now - ttl), + ) + .await + .map_err(JobError::retry)?; + } + + if let Some(ttl) = config.user_session_inactivity_ttl { + repo.queue_job() + .schedule_job( + &mut rng, + clock, + ExpireInactiveUserSessionsJob::new(now - ttl), + ) + .await + .map_err(JobError::retry)?; + } + + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} + +#[async_trait] +impl RunnableJob for ExpireInactiveOAuthSessionsJob { + async fn run(&self, state: 
&State, _context: JobContext) -> Result<(), JobError> { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let clock = state.clock(); + let mut rng = state.rng(); + let mut users_synced = HashSet::new(); + + // This delay is used to space out the device sync jobs + // We add 10 seconds between each device sync, meaning that it will spread out + // the syncs over ~16 minutes max if we get a full batch of 100 users + let mut delay = Duration::minutes(1); + + let filter = OAuth2SessionFilter::new() + .with_last_active_before(self.threshold()) + .for_any_user() + .only_dynamic_clients() + .active_only(); + + let pagination = self.pagination(100); + + let page = repo + .oauth2_session() + .list(filter, pagination) + .await + .map_err(JobError::retry)?; + + if let Some(job) = self.next(&page) { + tracing::info!("Scheduling job to expire the next batch of inactive sessions"); + repo.queue_job() + .schedule_job(&mut rng, clock, job) + .await + .map_err(JobError::retry)?; + } + + for edge in page.edges { + if let Some(user_id) = edge.node.user_id { + let inserted = users_synced.insert(user_id); + if inserted { + tracing::info!(user.id = %user_id, "Scheduling devices sync for user"); + repo.queue_job() + .schedule_job_later( + &mut rng, + clock, + SyncDevicesJob::new_for_id(user_id), + clock.now() + delay, + ) + .await + .map_err(JobError::retry)?; + delay += Duration::seconds(10); + } + } + + repo.oauth2_session() + .finish(clock, edge.node) + .await + .map_err(JobError::retry)?; + } + + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} + +#[async_trait] +impl RunnableJob for ExpireInactiveCompatSessionsJob { + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let clock = state.clock(); + let mut rng = state.rng(); + let mut users_synced = HashSet::new(); + + // This delay is used to space out the device sync jobs + // We add 10 seconds 
between each device sync, meaning that it will spread out + // the syncs over ~16 minutes max if we get a full batch of 100 users + let mut delay = Duration::minutes(1); + + let filter = CompatSessionFilter::new() + .with_last_active_before(self.threshold()) + .active_only(); + + let pagination = self.pagination(100); + + let page = repo + .compat_session() + .list(filter, pagination) + .await + .map_err(JobError::retry)? + .map(|(c, _)| c); + + if let Some(job) = self.next(&page) { + tracing::info!("Scheduling job to expire the next batch of inactive sessions"); + repo.queue_job() + .schedule_job(&mut rng, clock, job) + .await + .map_err(JobError::retry)?; + } + + for edge in page.edges { + let inserted = users_synced.insert(edge.node.user_id); + if inserted { + tracing::info!(user.id = %edge.node.user_id, "Scheduling devices sync for user"); + repo.queue_job() + .schedule_job_later( + &mut rng, + clock, + SyncDevicesJob::new_for_id(edge.node.user_id), + clock.now() + delay, + ) + .await + .map_err(JobError::retry)?; + delay += Duration::seconds(10); + } + + repo.compat_session() + .finish(clock, edge.node) + .await + .map_err(JobError::retry)?; + } + + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} + +#[async_trait] +impl RunnableJob for ExpireInactiveUserSessionsJob { + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let mut repo = state.repository().await.map_err(JobError::retry)?; + let clock = state.clock(); + let mut rng = state.rng(); + + let filter = BrowserSessionFilter::new() + .with_last_active_before(self.threshold()) + .active_only(); + + let pagination = self.pagination(100); + + let page = repo + .browser_session() + .list(filter, pagination) + .await + .map_err(JobError::retry)?; + + if let Some(job) = self.next(&page) { + tracing::info!("Scheduling job to expire the next batch of inactive sessions"); + repo.queue_job() + .schedule_job(&mut rng, clock, job) + .await + .map_err(JobError::retry)?; 
+ } + + for edge in page.edges { + repo.browser_session() + .finish(clock, edge.node) + .await + .map_err(JobError::retry)?; + } + + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/tasks/src/user.rs b/matrix-authentication-service/crates/tasks/src/user.rs new file mode 100644 index 00000000..73ea444a --- /dev/null +++ b/matrix-authentication-service/crates/tasks/src/user.rs @@ -0,0 +1,184 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use anyhow::Context; +use async_trait::async_trait; +use mas_storage::{ + RepositoryAccess, + compat::CompatSessionFilter, + oauth2::OAuth2SessionFilter, + personal::PersonalSessionFilter, + queue::{DeactivateUserJob, ReactivateUserJob}, + user::{BrowserSessionFilter, UserEmailFilter, UserRepository}, +}; +use tracing::info; + +use crate::{ + State, + new_queue::{JobContext, JobError, RunnableJob}, +}; + +/// Job to deactivate a user, both locally and on the Matrix homeserver. +#[async_trait] +impl RunnableJob for DeactivateUserJob { + #[tracing::instrument( + name = "job.deactivate_user" + fields(user.id = %self.user_id(), erase = %self.hs_erase()), + skip_all, + )] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let clock = state.clock(); + let matrix = state.matrix_connection(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let user = repo + .user() + .lookup(self.user_id()) + .await + .map_err(JobError::retry)? 
+ .context("User not found") + .map_err(JobError::fail)?; + + // Let's first deactivate the user + let user = repo + .user() + .deactivate(clock, user) + .await + .context("Failed to deactivate user") + .map_err(JobError::retry)?; + + // Kill all sessions for the user + let n = repo + .browser_session() + .finish_bulk( + clock, + BrowserSessionFilter::new().for_user(&user).active_only(), + ) + .await + .map_err(JobError::retry)?; + info!(affected = n, "Killed all browser sessions for user"); + + let n = repo + .oauth2_session() + .finish_bulk( + clock, + OAuth2SessionFilter::new().for_user(&user).active_only(), + ) + .await + .map_err(JobError::retry)?; + info!(affected = n, "Killed all OAuth 2.0 sessions for user"); + + let n = repo + .compat_session() + .finish_bulk( + clock, + CompatSessionFilter::new().for_user(&user).active_only(), + ) + .await + .map_err(JobError::retry)?; + info!(affected = n, "Killed all compatibility sessions for user"); + + let n = repo + .personal_session() + .revoke_bulk( + clock, + PersonalSessionFilter::new() + .for_actor_user(&user) + .active_only(), + ) + .await + .map_err(JobError::retry)?; + info!( + affected = n, + "Killed all compatibility sessions acting as user" + ); + + let n = repo + .personal_session() + .revoke_bulk( + clock, + PersonalSessionFilter::new() + .for_owner_user(&user) + .active_only(), + ) + .await + .map_err(JobError::retry)?; + info!( + affected = n, + "Killed all compatibility sessions owned by user" + ); + + // Delete all the email addresses for the user + let n = repo + .user_email() + .remove_bulk(UserEmailFilter::new().for_user(&user)) + .await + .map_err(JobError::retry)?; + info!(affected = n, "Removed all email addresses for user"); + + // Delete all unsupported third-party IDs for the user + let n = repo + .user() + .delete_unsupported_threepids(&user) + .await + .map_err(JobError::retry)?; + info!( + affected = n, + "Removed all unsupported third-party IDs for user" + ); + + // Before calling back 
to the homeserver, commit the changes to the database, as + // we want the user to be locked out as soon as possible + repo.save().await.map_err(JobError::retry)?; + + info!("Deactivating user {} on homeserver", user.username); + matrix + .delete_user(&user.username, self.hs_erase()) + .await + .map_err(JobError::retry)?; + + Ok(()) + } +} + +/// Job to reactivate a user, both locally and on the Matrix homeserver. +#[async_trait] +impl RunnableJob for ReactivateUserJob { + #[tracing::instrument( + name = "job.reactivate_user", + fields(user.id = %self.user_id()), + skip_all, + )] + async fn run(&self, state: &State, _context: JobContext) -> Result<(), JobError> { + let matrix = state.matrix_connection(); + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let user = repo + .user() + .lookup(self.user_id()) + .await + .map_err(JobError::retry)? + .context("User not found") + .map_err(JobError::fail)?; + + info!("Reactivating user {} on homeserver", user.username); + matrix + .reactivate_user(&user.username) + .await + .map_err(JobError::retry)?; + + // We want to reactivate the user from our side only once it has been + // reactivated on the homeserver + let _user = repo + .user() + .reactivate(user) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + Ok(()) + } +} diff --git a/matrix-authentication-service/crates/templates/Cargo.toml b/matrix-authentication-service/crates/templates/Cargo.toml new file mode 100644 index 00000000..d9c1bb01 --- /dev/null +++ b/matrix-authentication-service/crates/templates/Cargo.toml @@ -0,0 +1,49 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-templates" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +anyhow.workspace = true +arc-swap.workspace = true +camino.workspace = true +chrono.workspace = true +http.workspace = true +minijinja-contrib.workspace = true +minijinja.workspace = true +rand.workspace = true +rand_chacha.workspace = true +serde_json.workspace = true +serde_urlencoded.workspace = true +serde.workspace = true +thiserror.workspace = true +tokio.workspace = true +tracing.workspace = true +ulid.workspace = true +url.workspace = true +v_htmlescape.workspace = true +walkdir.workspace = true + +oauth2-types.workspace = true +mas-data-model.workspace = true +mas-i18n.workspace = true +mas-iana.workspace = true +mas-policy.workspace = true +mas-router.workspace = true +mas-spa.workspace = true + +[dev-dependencies] +rand_chacha.workspace = true diff --git a/matrix-authentication-service/crates/templates/src/context.rs b/matrix-authentication-service/crates/templates/src/context.rs new file mode 100644 index 00000000..d43556fa --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/context.rs @@ -0,0 +1,2132 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +//! 
Contexts used in templates + +mod branding; +mod captcha; +mod ext; +mod features; + +use std::{ + collections::BTreeMap, + fmt::Formatter, + net::{IpAddr, Ipv4Addr}, +}; + +use chrono::{DateTime, Duration, Utc}; +use http::{Method, Uri, Version}; +use mas_data_model::{ + AuthorizationGrant, BrowserSession, Client, CompatSsoLogin, CompatSsoLoginState, + DeviceCodeGrant, MatrixUser, UpstreamOAuthLink, UpstreamOAuthProvider, + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderTokenAuthMethod, User, UserEmailAuthentication, + UserEmailAuthenticationCode, UserRecoverySession, UserRegistration, +}; +use mas_i18n::DataLocale; +use mas_iana::jose::JsonWebSignatureAlg; +use mas_policy::{Violation, ViolationCode}; +use mas_router::{Account, GraphQL, PostAuthAction, UrlBuilder}; +use oauth2_types::scope::{OPENID, Scope}; +use rand::{ + Rng, SeedableRng, + distributions::{Alphanumeric, DistString}, +}; +use rand_chacha::ChaCha8Rng; +use serde::{Deserialize, Serialize, ser::SerializeStruct}; +use ulid::Ulid; +use url::Url; + +pub use self::{ + branding::SiteBranding, captcha::WithCaptcha, ext::SiteConfigExt, features::SiteFeatures, +}; +use crate::{FieldError, FormField, FormState}; + +/// Helper trait to construct context wrappers +pub trait TemplateContext: Serialize { + /// Attach a user session to the template context + fn with_session(self, current_session: BrowserSession) -> WithSession + where + Self: Sized, + { + WithSession { + current_session, + inner: self, + } + } + + /// Attach an optional user session to the template context + fn maybe_with_session( + self, + current_session: Option, + ) -> WithOptionalSession + where + Self: Sized, + { + WithOptionalSession { + current_session, + inner: self, + } + } + + /// Attach a CSRF token to the template context + fn with_csrf(self, csrf_token: C) -> WithCsrf + where + Self: Sized, + C: ToString, + { + // 
TODO: make this method use a CsrfToken again + WithCsrf { + csrf_token: csrf_token.to_string(), + inner: self, + } + } + + /// Attach a language to the template context + fn with_language(self, lang: DataLocale) -> WithLanguage + where + Self: Sized, + { + WithLanguage { + lang: lang.to_string(), + inner: self, + } + } + + /// Attach a CAPTCHA configuration to the template context + fn with_captcha(self, captcha: Option) -> WithCaptcha + where + Self: Sized, + { + WithCaptcha::new(captcha, self) + } + + /// Generate sample values for this context type + /// + /// This is then used to check for template validity in unit tests and in + /// the CLI (`cargo run -- templates check`) + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized; +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct SampleIdentifier { + pub components: Vec<(&'static str, String)>, +} + +impl SampleIdentifier { + pub fn from_index(index: usize) -> Self { + Self { + components: Vec::default(), + } + .with_appended("index", format!("{index}")) + } + + pub fn with_appended(&self, kind: &'static str, locale: String) -> Self { + let mut new = self.clone(); + new.components.push((kind, locale)); + new + } +} + +pub(crate) fn sample_list(samples: Vec) -> BTreeMap { + samples + .into_iter() + .enumerate() + .map(|(index, sample)| (SampleIdentifier::from_index(index), sample)) + .collect() +} + +impl TemplateContext for () { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + BTreeMap::new() + } +} + +/// Context with a specified locale in it +#[derive(Serialize, Debug)] +pub struct WithLanguage { + lang: String, + + #[serde(flatten)] + inner: T, +} + +impl WithLanguage { + /// Get the language of this context + pub fn language(&self) -> &str { + &self.lang + } +} + +impl std::ops::Deref for WithLanguage { + type Target = T; + + fn deref(&self) -> 
&Self::Target { + &self.inner + } +} + +impl TemplateContext for WithLanguage { + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + // Create a forked RNG so we make samples deterministic between locales + let rng = ChaCha8Rng::from_rng(rng).unwrap(); + locales + .iter() + .flat_map(|locale| { + T::sample(now, &mut rng.clone(), locales) + .into_iter() + .map(|(sample_id, sample)| { + ( + sample_id.with_appended("locale", locale.to_string()), + WithLanguage { + lang: locale.to_string(), + inner: sample, + }, + ) + }) + }) + .collect() + } +} + +/// Context with a CSRF token in it +#[derive(Serialize, Debug)] +pub struct WithCsrf { + csrf_token: String, + + #[serde(flatten)] + inner: T, +} + +impl TemplateContext for WithCsrf { + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + T::sample(now, rng, locales) + .into_iter() + .map(|(k, inner)| { + ( + k, + WithCsrf { + csrf_token: "fake_csrf_token".into(), + inner, + }, + ) + }) + .collect() + } +} + +/// Context with a user session in it +#[derive(Serialize)] +pub struct WithSession { + current_session: BrowserSession, + + #[serde(flatten)] + inner: T, +} + +impl TemplateContext for WithSession { + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + BrowserSession::samples(now, rng) + .into_iter() + .enumerate() + .flat_map(|(session_index, session)| { + T::sample(now, rng, locales) + .into_iter() + .map(move |(k, inner)| { + ( + k.with_appended("browser-session", session_index.to_string()), + WithSession { + current_session: session.clone(), + inner, + }, + ) + }) + }) + .collect() + } +} + +/// Context with an optional user session in it +#[derive(Serialize)] +pub struct WithOptionalSession { + current_session: Option, + + #[serde(flatten)] + inner: T, +} + +impl TemplateContext for WithOptionalSession { + 
fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + BrowserSession::samples(now, rng) + .into_iter() + .map(Some) // Wrap all samples in an Option + .chain(std::iter::once(None)) // Add the "None" option + .enumerate() + .flat_map(|(session_index, session)| { + T::sample(now, rng, locales) + .into_iter() + .map(move |(k, inner)| { + ( + if session.is_some() { + k.with_appended("browser-session", session_index.to_string()) + } else { + k + }, + WithOptionalSession { + current_session: session.clone(), + inner, + }, + ) + }) + }) + .collect() + } +} + +/// An empty context used for composition +pub struct EmptyContext; + +impl Serialize for EmptyContext { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut s = serializer.serialize_struct("EmptyContext", 0)?; + // FIXME: for some reason, serde seems to not like struct flattening with empty + // stuff + s.serialize_field("__UNUSED", &())?; + s.end() + } +} + +impl TemplateContext for EmptyContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![EmptyContext]) + } +} + +/// Context used by the `index.html` template +#[derive(Serialize)] +pub struct IndexContext { + discovery_url: Url, +} + +impl IndexContext { + /// Constructs the context for the index page from the OIDC discovery + /// document URL + #[must_use] + pub fn new(discovery_url: Url) -> Self { + Self { discovery_url } + } +} + +impl TemplateContext for IndexContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![Self { + discovery_url: "https://example.com/.well-known/openid-configuration" + .parse() + .unwrap(), + }]) + } +} + +/// Config used by the frontend app +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct AppConfig { + root: 
String, + graphql_endpoint: String, +} + +/// Context used by the `app.html` template +#[derive(Serialize)] +pub struct AppContext { + app_config: AppConfig, +} + +impl AppContext { + /// Constructs the context given the [`UrlBuilder`] + #[must_use] + pub fn from_url_builder(url_builder: &UrlBuilder) -> Self { + let root = url_builder.relative_url_for(&Account::default()); + let graphql_endpoint = url_builder.relative_url_for(&GraphQL); + Self { + app_config: AppConfig { + root, + graphql_endpoint, + }, + } + } +} + +impl TemplateContext for AppContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let url_builder = UrlBuilder::new("https://example.com/".parse().unwrap(), None, None); + sample_list(vec![Self::from_url_builder(&url_builder)]) + } +} + +/// Context used by the `swagger/doc.html` template
#[derive(Serialize)] +pub struct ApiDocContext { + openapi_url: Url, + callback_url: Url, +} + +impl ApiDocContext { + /// Constructs a context for the API documentation page given the + /// [`UrlBuilder`] + #[must_use] + pub fn from_url_builder(url_builder: &UrlBuilder) -> Self { + Self { + openapi_url: url_builder.absolute_url_for(&mas_router::ApiSpec), + callback_url: url_builder.absolute_url_for(&mas_router::ApiDocCallback), + } + } +} + +impl TemplateContext for ApiDocContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let url_builder = UrlBuilder::new("https://example.com/".parse().unwrap(), None, None); + sample_list(vec![Self::from_url_builder(&url_builder)]) + } +} + +/// Fields of the login form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum LoginFormField { + /// The username field + Username, + + /// The password field + Password, +} + +impl FormField for LoginFormField { + fn keep(&self) -> bool { + match self { +
Self::Username => true, + Self::Password => false, + } + } +} + +/// Inner context used in login screen. See [`PostAuthContext`]. +#[derive(Serialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum PostAuthContextInner { + /// Continue an authorization grant + ContinueAuthorizationGrant { + /// The authorization grant that will be continued after authentication + grant: Box, + }, + + /// Continue a device code grant + ContinueDeviceCodeGrant { + /// The device code grant that will be continued after authentication + grant: Box, + }, + + /// Continue legacy login + /// TODO: add the login context in there + ContinueCompatSsoLogin { + /// The compat SSO login request + login: Box, + }, + + /// Change the account password + ChangePassword, + + /// Link an upstream account + LinkUpstream { + /// The upstream provider + provider: Box, + + /// The link + link: Box, + }, + + /// Go to the account management page + ManageAccount, +} + +/// Context used in login screen, for the post-auth action to do +#[derive(Serialize)] +pub struct PostAuthContext { + /// The post auth action params from the URL + pub params: PostAuthAction, + + /// The loaded post auth context + #[serde(flatten)] + pub ctx: PostAuthContextInner, +} + +/// Context used by the `login.html` template +#[derive(Serialize, Default)] +pub struct LoginContext { + form: FormState, + next: Option, + providers: Vec, +} + +impl TemplateContext for LoginContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + // TODO: samples with errors + sample_list(vec![ + LoginContext { + form: FormState::default(), + next: None, + providers: Vec::new(), + }, + LoginContext { + form: FormState::default(), + next: None, + providers: Vec::new(), + }, + LoginContext { + form: FormState::default() + .with_error_on_field(LoginFormField::Username, FieldError::Required) + .with_error_on_field( + LoginFormField::Password, + FieldError::Policy { 
+ code: None, + message: "password too short".to_owned(), + }, + ), + next: None, + providers: Vec::new(), + }, + LoginContext { + form: FormState::default() + .with_error_on_field(LoginFormField::Username, FieldError::Exists), + next: None, + providers: Vec::new(), + }, + ]) + } +} + +impl LoginContext { + /// Set the form state + #[must_use] + pub fn with_form_state(self, form: FormState) -> Self { + Self { form, ..self } + } + + /// Mutably borrow the form state + pub fn form_state_mut(&mut self) -> &mut FormState { + &mut self.form + } + + /// Set the upstream OAuth 2.0 providers + #[must_use] + pub fn with_upstream_providers(self, providers: Vec) -> Self { + Self { providers, ..self } + } + + /// Add a post authentication action to the context + #[must_use] + pub fn with_post_action(self, context: PostAuthContext) -> Self { + Self { + next: Some(context), + ..self + } + } +} + +/// Fields of the registration form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RegisterFormField { + /// The username field + Username, + + /// The email field + Email, + + /// The password field + Password, + + /// The password confirmation field + PasswordConfirm, + + /// The terms of service agreement field + AcceptTerms, +} + +impl FormField for RegisterFormField { + fn keep(&self) -> bool { + match self { + Self::Username | Self::Email | Self::AcceptTerms => true, + Self::Password | Self::PasswordConfirm => false, + } + } +} + +/// Context used by the `register.html` template +#[derive(Serialize, Default)] +pub struct RegisterContext { + providers: Vec, + next: Option, +} + +impl TemplateContext for RegisterContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![RegisterContext { + providers: Vec::new(), + next: None, + }]) + } +} + +impl RegisterContext { + /// Create a new context with the given upstream 
providers + #[must_use] + pub fn new(providers: Vec) -> Self { + Self { + providers, + next: None, + } + } + + /// Add a post authentication action to the context + #[must_use] + pub fn with_post_action(self, next: PostAuthContext) -> Self { + Self { + next: Some(next), + ..self + } + } +} + +/// Context used by the `password_register.html` template +#[derive(Serialize, Default)] +pub struct PasswordRegisterContext { + form: FormState, + next: Option, +} + +impl TemplateContext for PasswordRegisterContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + // TODO: samples with errors + sample_list(vec![PasswordRegisterContext { + form: FormState::default(), + next: None, + }]) + } +} + +impl PasswordRegisterContext { + /// Add an error on the registration form + #[must_use] + pub fn with_form_state(self, form: FormState) -> Self { + Self { form, ..self } + } + + /// Add a post authentication action to the context + #[must_use] + pub fn with_post_action(self, next: PostAuthContext) -> Self { + Self { + next: Some(next), + ..self + } + } +} + +/// Context used by the `consent.html` template +#[derive(Serialize)] +pub struct ConsentContext { + grant: AuthorizationGrant, + client: Client, + action: PostAuthAction, + matrix_user: MatrixUser, +} + +impl TemplateContext for ConsentContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list( + Client::samples(now, rng) + .into_iter() + .map(|client| { + let mut grant = AuthorizationGrant::sample(now, rng); + let action = PostAuthAction::continue_grant(grant.id); + // XXX + grant.client_id = client.id; + Self { + grant, + client, + action, + matrix_user: MatrixUser { + mxid: "@alice:example.com".to_owned(), + display_name: Some("Alice".to_owned()), + }, + } + }) + .collect(), + ) + } +} + +impl ConsentContext { + /// Constructs a context for the client consent 
page + #[must_use] + pub fn new(grant: AuthorizationGrant, client: Client, matrix_user: MatrixUser) -> Self { + let action = PostAuthAction::continue_grant(grant.id); + Self { + grant, + client, + action, + matrix_user, + } + } +} + +#[derive(Serialize)] +#[serde(tag = "grant_type")] +enum PolicyViolationGrant { + #[serde(rename = "authorization_code")] + Authorization(AuthorizationGrant), + #[serde(rename = "urn:ietf:params:oauth:grant-type:device_code")] + DeviceCode(DeviceCodeGrant), +} + +/// Context used by the `policy_violation.html` template +#[derive(Serialize)] +pub struct PolicyViolationContext { + grant: PolicyViolationGrant, + client: Client, + action: PostAuthAction, +} + +impl TemplateContext for PolicyViolationContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list( + Client::samples(now, rng) + .into_iter() + .flat_map(|client| { + let mut grant = AuthorizationGrant::sample(now, rng); + // XXX + grant.client_id = client.id; + + let authorization_grant = + PolicyViolationContext::for_authorization_grant(grant, client.clone()); + let device_code_grant = PolicyViolationContext::for_device_code_grant( + DeviceCodeGrant { + id: Ulid::from_datetime_with_source(now.into(), rng), + state: mas_data_model::DeviceCodeGrantState::Pending, + client_id: client.id, + scope: [OPENID].into_iter().collect(), + user_code: Alphanumeric.sample_string(rng, 6).to_uppercase(), + device_code: Alphanumeric.sample_string(rng, 32), + created_at: now - Duration::try_minutes(5).unwrap(), + expires_at: now + Duration::try_minutes(25).unwrap(), + ip_address: None, + user_agent: None, + }, + client, + ); + + [authorization_grant, device_code_grant] + }) + .collect(), + ) + } +} + +impl PolicyViolationContext { + /// Constructs a context for the policy violation page for an authorization + /// grant + #[must_use] + pub const fn for_authorization_grant(grant: AuthorizationGrant, client: Client) 
-> Self { + let action = PostAuthAction::continue_grant(grant.id); + Self { + grant: PolicyViolationGrant::Authorization(grant), + client, + action, + } + } + + /// Constructs a context for the policy violation page for a device code + /// grant + #[must_use] + pub const fn for_device_code_grant(grant: DeviceCodeGrant, client: Client) -> Self { + let action = PostAuthAction::continue_device_code_grant(grant.id); + Self { + grant: PolicyViolationGrant::DeviceCode(grant), + client, + action, + } + } +} + +/// Context used by the `compat_login_policy_violation.html` template +#[derive(Serialize)] +pub struct CompatLoginPolicyViolationContext { + violations: Vec, +} + +impl TemplateContext for CompatLoginPolicyViolationContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![ + CompatLoginPolicyViolationContext { violations: vec![] }, + CompatLoginPolicyViolationContext { + violations: vec![Violation { + msg: "user has too many active sessions".to_owned(), + redirect_uri: None, + field: None, + code: Some(ViolationCode::TooManySessions), + }], + }, + ]) + } +} + +impl CompatLoginPolicyViolationContext { + /// Constructs a context for the compatibility login policy violation page + /// given the list of violations + #[must_use] + pub const fn for_violations(violations: Vec) -> Self { + Self { violations } + } +} + +/// Context used by the `sso.html` template +#[derive(Serialize)] +pub struct CompatSsoContext { + login: CompatSsoLogin, + action: PostAuthAction, + matrix_user: MatrixUser, +} + +impl TemplateContext for CompatSsoContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let id = Ulid::from_datetime_with_source(now.into(), rng); + sample_list(vec![CompatSsoContext::new( + CompatSsoLogin { + id, + redirect_uri: Url::parse("https://app.element.io/").unwrap(), + login_token: 
"abcdefghijklmnopqrstuvwxyz012345".into(), + created_at: now, + state: CompatSsoLoginState::Pending, + }, + MatrixUser { + mxid: "@alice:example.com".to_owned(), + display_name: Some("Alice".to_owned()), + }, + )]) + } +} + +impl CompatSsoContext { + /// Constructs a context for the legacy SSO login page + #[must_use] + pub fn new(login: CompatSsoLogin, matrix_user: MatrixUser) -> Self +where { + let action = PostAuthAction::continue_compat_sso_login(login.id); + Self { + login, + action, + matrix_user, + } + } +} + +/// Context used by the `emails/recovery.{txt,html,subject}` templates +#[derive(Serialize)] +pub struct EmailRecoveryContext { + user: User, + session: UserRecoverySession, + recovery_link: Url, +} + +impl EmailRecoveryContext { + /// Constructs a context for the recovery email + #[must_use] + pub fn new(user: User, session: UserRecoverySession, recovery_link: Url) -> Self { + Self { + user, + session, + recovery_link, + } + } + + /// Returns the user associated with the recovery email + #[must_use] + pub fn user(&self) -> &User { + &self.user + } + + /// Returns the recovery session associated with the recovery email + #[must_use] + pub fn session(&self) -> &UserRecoverySession { + &self.session + } +} + +impl TemplateContext for EmailRecoveryContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(User::samples(now, rng).into_iter().map(|user| { + let session = UserRecoverySession { + id: Ulid::from_datetime_with_source(now.into(), rng), + email: "hello@example.com".to_owned(), + user_agent: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 (KHTML, like Gecko) Version/6.0.5 Safari/536.30.1".to_owned(), + ip_address: Some(IpAddr::from([192_u8, 0, 2, 1])), + locale: "en".to_owned(), + created_at: now, + consumed_at: None, + }; + + let link = "https://example.com/recovery/complete?ticket=abcdefghijklmnopqrstuvwxyz0123456789".parse().unwrap(); + 
+ Self::new(user, session, link) + }).collect()) + } +} + +/// Context used by the `emails/verification.{txt,html,subject}` templates +#[derive(Serialize)] +pub struct EmailVerificationContext { + #[serde(skip_serializing_if = "Option::is_none")] + browser_session: Option, + #[serde(skip_serializing_if = "Option::is_none")] + user_registration: Option, + authentication_code: UserEmailAuthenticationCode, +} + +impl EmailVerificationContext { + /// Constructs a context for the verification email + #[must_use] + pub fn new( + authentication_code: UserEmailAuthenticationCode, + browser_session: Option, + user_registration: Option, + ) -> Self { + Self { + browser_session, + user_registration, + authentication_code, + } + } + + /// Get the user to which this email is being sent + #[must_use] + pub fn user(&self) -> Option<&User> { + self.browser_session.as_ref().map(|s| &s.user) + } + + /// Get the verification code being sent + #[must_use] + pub fn code(&self) -> &str { + &self.authentication_code.code + } +} + +impl TemplateContext for EmailVerificationContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list( + BrowserSession::samples(now, rng) + .into_iter() + .map(|browser_session| { + let authentication_code = UserEmailAuthenticationCode { + id: Ulid::from_datetime_with_source(now.into(), rng), + user_email_authentication_id: Ulid::from_datetime_with_source( + now.into(), + rng, + ), + code: "123456".to_owned(), + created_at: now - Duration::try_minutes(5).unwrap(), + expires_at: now + Duration::try_minutes(25).unwrap(), + }; + + Self { + browser_session: Some(browser_session), + user_registration: None, + authentication_code, + } + }) + .collect(), + ) + } +} + +/// Fields of the email verification form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RegisterStepsVerifyEmailFormField { + /// The code 
field + Code, +} + +impl FormField for RegisterStepsVerifyEmailFormField { + fn keep(&self) -> bool { + match self { + Self::Code => true, + } + } +} + +/// Context used by the `pages/register/steps/verify_email.html` templates +#[derive(Serialize)] +pub struct RegisterStepsVerifyEmailContext { + form: FormState, + authentication: UserEmailAuthentication, +} + +impl RegisterStepsVerifyEmailContext { + /// Constructs a context for the email verification page + #[must_use] + pub fn new(authentication: UserEmailAuthentication) -> Self { + Self { + form: FormState::default(), + authentication, + } + } + + /// Set the form state + #[must_use] + pub fn with_form_state(self, form: FormState) -> Self { + Self { form, ..self } + } +} + +impl TemplateContext for RegisterStepsVerifyEmailContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let authentication = UserEmailAuthentication { + id: Ulid::from_datetime_with_source(now.into(), rng), + user_session_id: None, + user_registration_id: None, + email: "foobar@example.com".to_owned(), + created_at: now, + completed_at: None, + }; + + sample_list(vec![Self { + form: FormState::default(), + authentication, + }]) + } +} + +/// Context used by the `pages/register/steps/email_in_use.html` template +#[derive(Serialize)] +pub struct RegisterStepsEmailInUseContext { + email: String, + action: Option, +} + +impl RegisterStepsEmailInUseContext { + /// Constructs a context for the email in use page + #[must_use] + pub fn new(email: String, action: Option) -> Self { + Self { email, action } + } +} + +impl TemplateContext for RegisterStepsEmailInUseContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let email = "hello@example.com".to_owned(); + let action = PostAuthAction::continue_grant(Ulid::nil()); + sample_list(vec![Self::new(email, Some(action))]) + } +} + +/// Fields 
for the display name form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RegisterStepsDisplayNameFormField { + /// The display name + DisplayName, +} + +impl FormField for RegisterStepsDisplayNameFormField { + fn keep(&self) -> bool { + match self { + Self::DisplayName => true, + } + } +} + +/// Context used by the `display_name.html` template +#[derive(Serialize, Default)] +pub struct RegisterStepsDisplayNameContext { + form: FormState, +} + +impl RegisterStepsDisplayNameContext { + /// Constructs a context for the display name page + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the form state + #[must_use] + pub fn with_form_state( + mut self, + form_state: FormState, + ) -> Self { + self.form = form_state; + self + } +} + +impl TemplateContext for RegisterStepsDisplayNameContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![Self { + form: FormState::default(), + }]) + } +} + +/// Fields of the registration token form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RegisterStepsRegistrationTokenFormField { + /// The registration token + Token, +} + +impl FormField for RegisterStepsRegistrationTokenFormField { + fn keep(&self) -> bool { + match self { + Self::Token => true, + } + } +} + +/// The registration token page context +#[derive(Serialize, Default)] +pub struct RegisterStepsRegistrationTokenContext { + form: FormState, +} + +impl RegisterStepsRegistrationTokenContext { + /// Constructs a context for the registration token page + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the form state + #[must_use] + pub fn with_form_state( + mut self, + form_state: FormState, + ) -> Self { + self.form = form_state; + self + } +} + +impl TemplateContext for 
RegisterStepsRegistrationTokenContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![Self { + form: FormState::default(), + }]) + } +} + +/// Fields of the account recovery start form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RecoveryStartFormField { + /// The email + Email, +} + +impl FormField for RecoveryStartFormField { + fn keep(&self) -> bool { + match self { + Self::Email => true, + } + } +} + +/// Context used by the `pages/recovery/start.html` template +#[derive(Serialize, Default)] +pub struct RecoveryStartContext { + form: FormState, +} + +impl RecoveryStartContext { + /// Constructs a context for the recovery start page + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the form state + #[must_use] + pub fn with_form_state(self, form: FormState) -> Self { + Self { form } + } +} + +impl TemplateContext for RecoveryStartContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![ + Self::new(), + Self::new().with_form_state( + FormState::default() + .with_error_on_field(RecoveryStartFormField::Email, FieldError::Required), + ), + Self::new().with_form_state( + FormState::default() + .with_error_on_field(RecoveryStartFormField::Email, FieldError::Invalid), + ), + ]) + } +} + +/// Context used by the `pages/recovery/progress.html` template +#[derive(Serialize)] +pub struct RecoveryProgressContext { + session: UserRecoverySession, + /// Whether resending the e-mail was denied because of rate limits + resend_failed_due_to_rate_limit: bool, +} + +impl RecoveryProgressContext { + /// Constructs a context for the recovery progress page + #[must_use] + pub fn new(session: UserRecoverySession, resend_failed_due_to_rate_limit: bool) -> Self { + Self { + session, + 
resend_failed_due_to_rate_limit, + } + } +} + +impl TemplateContext for RecoveryProgressContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let session = UserRecoverySession { + id: Ulid::from_datetime_with_source(now.into(), rng), + email: "name@mail.com".to_owned(), + user_agent: "Mozilla/5.0".to_owned(), + ip_address: None, + locale: "en".to_owned(), + created_at: now, + consumed_at: None, + }; + + sample_list(vec![ + Self { + session: session.clone(), + resend_failed_due_to_rate_limit: false, + }, + Self { + session, + resend_failed_due_to_rate_limit: true, + }, + ]) + } +} + +/// Context used by the `pages/recovery/expired.html` template +#[derive(Serialize)] +pub struct RecoveryExpiredContext { + session: UserRecoverySession, +} + +impl RecoveryExpiredContext { + /// Constructs a context for the recovery expired page + #[must_use] + pub fn new(session: UserRecoverySession) -> Self { + Self { session } + } +} + +impl TemplateContext for RecoveryExpiredContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let session = UserRecoverySession { + id: Ulid::from_datetime_with_source(now.into(), rng), + email: "name@mail.com".to_owned(), + user_agent: "Mozilla/5.0".to_owned(), + ip_address: None, + locale: "en".to_owned(), + created_at: now, + consumed_at: None, + }; + + sample_list(vec![Self { session }]) + } +} +/// Fields of the account recovery finish form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RecoveryFinishFormField { + /// The new password + NewPassword, + + /// The new password confirmation + NewPasswordConfirm, +} + +impl FormField for RecoveryFinishFormField { + fn keep(&self) -> bool { + false + } +} + +/// Context used by the `pages/recovery/finish.html` template +#[derive(Serialize)] +pub struct 
RecoveryFinishContext { + user: User, + form: FormState, +} + +impl RecoveryFinishContext { + /// Constructs a context for the recovery finish page + #[must_use] + pub fn new(user: User) -> Self { + Self { + user, + form: FormState::default(), + } + } + + /// Set the form state + #[must_use] + pub fn with_form_state(mut self, form: FormState) -> Self { + self.form = form; + self + } +} + +impl TemplateContext for RecoveryFinishContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list( + User::samples(now, rng) + .into_iter() + .flat_map(|user| { + vec![ + Self::new(user.clone()), + Self::new(user.clone()).with_form_state( + FormState::default().with_error_on_field( + RecoveryFinishFormField::NewPassword, + FieldError::Invalid, + ), + ), + Self::new(user.clone()).with_form_state( + FormState::default().with_error_on_field( + RecoveryFinishFormField::NewPasswordConfirm, + FieldError::Invalid, + ), + ), + ] + }) + .collect(), + ) + } +} + +/// Context used by the `pages/upstream_oauth2/link_mismatch.html` +/// templates +#[derive(Serialize)] +pub struct UpstreamExistingLinkContext { + linked_user: User, +} + +impl UpstreamExistingLinkContext { + /// Constructs a new context with an existing linked user + #[must_use] + pub fn new(linked_user: User) -> Self { + Self { linked_user } + } +} + +impl TemplateContext for UpstreamExistingLinkContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list( + User::samples(now, rng) + .into_iter() + .map(|linked_user| Self { linked_user }) + .collect(), + ) + } +} + +/// Context used by the `pages/upstream_oauth2/suggest_link.html` +/// templates +#[derive(Serialize)] +pub struct UpstreamSuggestLink { + post_logout_action: PostAuthAction, +} + +impl UpstreamSuggestLink { + /// Constructs a new context with an existing linked user + #[must_use] + pub fn 
new(link: &UpstreamOAuthLink) -> Self { + Self::for_link_id(link.id) + } + + fn for_link_id(id: Ulid) -> Self { + let post_logout_action = PostAuthAction::link_upstream(id); + Self { post_logout_action } + } +} + +impl TemplateContext for UpstreamSuggestLink { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let id = Ulid::from_datetime_with_source(now.into(), rng); + sample_list(vec![Self::for_link_id(id)]) + } +} + +/// User-editeable fields of the upstream account link form +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum UpstreamRegisterFormField { + /// The username field + Username, + + /// Accept the terms of service + AcceptTerms, +} + +impl FormField for UpstreamRegisterFormField { + fn keep(&self) -> bool { + match self { + Self::Username | Self::AcceptTerms => true, + } + } +} + +/// Context used by the `pages/upstream_oauth2/do_register.html` +/// templates +#[derive(Serialize)] +pub struct UpstreamRegister { + upstream_oauth_link: UpstreamOAuthLink, + upstream_oauth_provider: UpstreamOAuthProvider, + imported_localpart: Option, + force_localpart: bool, + imported_display_name: Option, + force_display_name: bool, + imported_email: Option, + force_email: bool, + form_state: FormState, +} + +impl UpstreamRegister { + /// Constructs a new context for registering a new user from an upstream + /// provider + #[must_use] + pub fn new( + upstream_oauth_link: UpstreamOAuthLink, + upstream_oauth_provider: UpstreamOAuthProvider, + ) -> Self { + Self { + upstream_oauth_link, + upstream_oauth_provider, + imported_localpart: None, + force_localpart: false, + imported_display_name: None, + force_display_name: false, + imported_email: None, + force_email: false, + form_state: FormState::default(), + } + } + + /// Set the imported localpart + pub fn set_localpart(&mut self, localpart: String, force: bool) { + 
self.imported_localpart = Some(localpart); + self.force_localpart = force; + } + + /// Set the imported localpart + #[must_use] + pub fn with_localpart(self, localpart: String, force: bool) -> Self { + Self { + imported_localpart: Some(localpart), + force_localpart: force, + ..self + } + } + + /// Set the imported display name + pub fn set_display_name(&mut self, display_name: String, force: bool) { + self.imported_display_name = Some(display_name); + self.force_display_name = force; + } + + /// Set the imported display name + #[must_use] + pub fn with_display_name(self, display_name: String, force: bool) -> Self { + Self { + imported_display_name: Some(display_name), + force_display_name: force, + ..self + } + } + + /// Set the imported email + pub fn set_email(&mut self, email: String, force: bool) { + self.imported_email = Some(email); + self.force_email = force; + } + + /// Set the imported email + #[must_use] + pub fn with_email(self, email: String, force: bool) -> Self { + Self { + imported_email: Some(email), + force_email: force, + ..self + } + } + + /// Set the form state + pub fn set_form_state(&mut self, form_state: FormState) { + self.form_state = form_state; + } + + /// Set the form state + #[must_use] + pub fn with_form_state(self, form_state: FormState) -> Self { + Self { form_state, ..self } + } +} + +impl TemplateContext for UpstreamRegister { + fn sample( + now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![Self::new( + UpstreamOAuthLink { + id: Ulid::nil(), + provider_id: Ulid::nil(), + user_id: None, + subject: "subject".to_owned(), + human_account_name: Some("@john".to_owned()), + created_at: now, + }, + UpstreamOAuthProvider { + id: Ulid::nil(), + issuer: Some("https://example.com/".to_owned()), + human_name: Some("Example Ltd.".to_owned()), + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: 
UpstreamOAuthProviderTokenAuthMethod::ClientSecretBasic, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + client_id: "client-id".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + jwks_uri_override: None, + userinfo_endpoint_override: None, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + discovery_mode: UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + created_at: now, + disabled_at: None, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + )]) + } +} + +/// Form fields on the device link page +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum DeviceLinkFormField { + /// The device code field + Code, +} + +impl FormField for DeviceLinkFormField { + fn keep(&self) -> bool { + match self { + Self::Code => true, + } + } +} + +/// Context used by the `device_link.html` template +#[derive(Serialize, Default, Debug)] +pub struct DeviceLinkContext { + form_state: FormState, +} + +impl DeviceLinkContext { + /// Constructs a new context with an existing linked user + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the form state + #[must_use] + pub fn with_form_state(mut self, form_state: FormState) -> Self { + self.form_state = form_state; + self + } +} + +impl TemplateContext for DeviceLinkContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![ + Self::new(), + Self::new().with_form_state( + FormState::default() + .with_error_on_field(DeviceLinkFormField::Code, FieldError::Required), + ), + ]) + } +} + +/// 
Context used by the `device_consent.html` template +#[derive(Serialize, Debug)] +pub struct DeviceConsentContext { + grant: DeviceCodeGrant, + client: Client, + matrix_user: MatrixUser, +} + +impl DeviceConsentContext { + /// Constructs a new context with an existing linked user + #[must_use] + pub fn new(grant: DeviceCodeGrant, client: Client, matrix_user: MatrixUser) -> Self { + Self { + grant, + client, + matrix_user, + } + } +} + +impl TemplateContext for DeviceConsentContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(Client::samples(now, rng) + .into_iter() + .map(|client| { + let grant = DeviceCodeGrant { + id: Ulid::from_datetime_with_source(now.into(), rng), + state: mas_data_model::DeviceCodeGrantState::Pending, + client_id: client.id, + scope: [OPENID].into_iter().collect(), + user_code: Alphanumeric.sample_string(rng, 6).to_uppercase(), + device_code: Alphanumeric.sample_string(rng, 32), + created_at: now - Duration::try_minutes(5).unwrap(), + expires_at: now + Duration::try_minutes(25).unwrap(), + ip_address: Some(IpAddr::V4(Ipv4Addr::LOCALHOST)), + user_agent: Some("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.0.0 Safari/537.36".to_owned()), + }; + Self { + grant, + client, + matrix_user: MatrixUser { + mxid: "@alice:example.com".to_owned(), + display_name: Some("Alice".to_owned()), + } + } + }) + .collect()) + } +} + +/// Context used by the `account/deactivated.html` and `account/locked.html` +/// templates +#[derive(Serialize)] +pub struct AccountInactiveContext { + user: User, +} + +impl AccountInactiveContext { + /// Constructs a new context with an existing linked user + #[must_use] + pub fn new(user: User) -> Self { + Self { user } + } +} + +impl TemplateContext for AccountInactiveContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: 
Sized, + { + sample_list( + User::samples(now, rng) + .into_iter() + .map(|user| AccountInactiveContext { user }) + .collect(), + ) + } +} + +/// Context used by the `device_name.txt` template +#[derive(Serialize)] +pub struct DeviceNameContext { + client: Client, + raw_user_agent: String, +} + +impl DeviceNameContext { + /// Constructs a new context with a client and user agent + #[must_use] + pub fn new(client: Client, user_agent: Option) -> Self { + Self { + client, + raw_user_agent: user_agent.unwrap_or_default(), + } + } +} + +impl TemplateContext for DeviceNameContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(Client::samples(now, rng) + .into_iter() + .map(|client| DeviceNameContext { + client, + raw_user_agent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.0.0 Safari/537.36".to_owned(), + }) + .collect()) + } +} + +/// Context used by the `form_post.html` template +#[derive(Serialize)] +pub struct FormPostContext { + redirect_uri: Option, + params: T, +} + +impl TemplateContext for FormPostContext { + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + let sample_params = T::sample(now, rng, locales); + sample_params + .into_iter() + .map(|(k, params)| { + ( + k, + FormPostContext { + redirect_uri: "https://example.com/callback".parse().ok(), + params, + }, + ) + }) + .collect() + } +} + +impl FormPostContext { + /// Constructs a context for the `form_post` response mode form for a given + /// URL + pub fn new_for_url(redirect_uri: Url, params: T) -> Self { + Self { + redirect_uri: Some(redirect_uri), + params, + } + } + + /// Constructs a context for the `form_post` response mode form for the + /// current URL + pub fn new_for_current_url(params: T) -> Self { + Self { + redirect_uri: None, + params, + } + } + + /// Add the language to the context 
+ /// + /// This is usually implemented by the [`TemplateContext`] trait, but it is + /// annoying to make it work because of the generic parameter + pub fn with_language(self, lang: &DataLocale) -> WithLanguage { + WithLanguage { + lang: lang.to_string(), + inner: self, + } + } +} + +/// Context used by the `error.html` template +#[derive(Default, Serialize, Debug, Clone)] +pub struct ErrorContext { + code: Option<&'static str>, + description: Option, + details: Option, + lang: Option, +} + +impl std::fmt::Display for ErrorContext { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + if let Some(code) = &self.code { + writeln!(f, "code: {code}")?; + } + if let Some(description) = &self.description { + writeln!(f, "{description}")?; + } + + if let Some(details) = &self.details { + writeln!(f, "details: {details}")?; + } + + Ok(()) + } +} + +impl TemplateContext for ErrorContext { + fn sample( + _now: chrono::DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![ + Self::new() + .with_code("sample_error") + .with_description("A fancy description".into()) + .with_details("Something happened".into()), + Self::new().with_code("another_error"), + Self::new(), + ]) + } +} + +impl ErrorContext { + /// Constructs a context for the error page + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Add the error code to the context + #[must_use] + pub fn with_code(mut self, code: &'static str) -> Self { + self.code = Some(code); + self + } + + /// Add the error description to the context + #[must_use] + pub fn with_description(mut self, description: String) -> Self { + self.description = Some(description); + self + } + + /// Add the error details to the context + #[must_use] + pub fn with_details(mut self, details: String) -> Self { + self.details = Some(details); + self + } + + /// Add the language to the context + #[must_use] + pub fn with_language(mut self, lang: &DataLocale) -> Self { + 
self.lang = Some(lang.to_string()); + self + } + + /// Get the error code, if any + #[must_use] + pub fn code(&self) -> Option<&'static str> { + self.code + } + + /// Get the description, if any + #[must_use] + pub fn description(&self) -> Option<&str> { + self.description.as_deref() + } + + /// Get the details, if any + #[must_use] + pub fn details(&self) -> Option<&str> { + self.details.as_deref() + } +} + +/// Context used by the not found (`404.html`) template +#[derive(Serialize)] +pub struct NotFoundContext { + method: String, + version: String, + uri: String, +} + +impl NotFoundContext { + /// Constructs a context for the not found page + #[must_use] + pub fn new(method: &Method, version: Version, uri: &Uri) -> Self { + Self { + method: method.to_string(), + version: format!("{version:?}"), + uri: uri.to_string(), + } + } +} + +impl TemplateContext for NotFoundContext { + fn sample( + _now: DateTime, + _rng: &mut R, + _locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + sample_list(vec![ + Self::new(&Method::GET, Version::HTTP_11, &"/".parse().unwrap()), + Self::new(&Method::POST, Version::HTTP_2, &"/foo/bar".parse().unwrap()), + Self::new( + &Method::PUT, + Version::HTTP_10, + &"/foo?bar=baz".parse().unwrap(), + ), + ]) + } +} diff --git a/matrix-authentication-service/crates/templates/src/context/branding.rs b/matrix-authentication-service/crates/templates/src/context/branding.rs new file mode 100644 index 00000000..15932567 --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/context/branding.rs @@ -0,0 +1,71 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::Arc; + +use minijinja::{ + Value, + value::{Enumerator, Object}, +}; + +/// Site branding information. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SiteBranding { + server_name: Arc, + policy_uri: Option>, + tos_uri: Option>, + imprint: Option>, +} + +impl SiteBranding { + /// Create a new site branding based on the given server name. + #[must_use] + pub fn new(server_name: impl Into>) -> Self { + Self { + server_name: server_name.into(), + policy_uri: None, + tos_uri: None, + imprint: None, + } + } + + /// Set the policy URI. + #[must_use] + pub fn with_policy_uri(mut self, policy_uri: impl Into>) -> Self { + self.policy_uri = Some(policy_uri.into()); + self + } + + /// Set the terms of service URI. + #[must_use] + pub fn with_tos_uri(mut self, tos_uri: impl Into>) -> Self { + self.tos_uri = Some(tos_uri.into()); + self + } + + /// Set the imprint. + #[must_use] + pub fn with_imprint(mut self, imprint: impl Into>) -> Self { + self.imprint = Some(imprint.into()); + self + } +} + +impl Object for SiteBranding { + fn get_value(self: &Arc, name: &Value) -> Option { + match name.as_str()? { + "server_name" => Some(self.server_name.clone().into()), + "policy_uri" => Some(Value::from(self.policy_uri.clone())), + "tos_uri" => Some(Value::from(self.tos_uri.clone())), + "imprint" => Some(Value::from(self.imprint.clone())), + _ => None, + } + } + + fn enumerate(self: &Arc) -> Enumerator { + Enumerator::Str(&["server_name", "policy_uri", "tos_uri", "imprint"]) + } +} diff --git a/matrix-authentication-service/crates/templates/src/context/captcha.rs b/matrix-authentication-service/crates/templates/src/context/captcha.rs new file mode 100644 index 00000000..f9d8723b --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/context/captcha.rs @@ -0,0 +1,75 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use std::{collections::BTreeMap, sync::Arc}; + +use mas_i18n::DataLocale; +use minijinja::{ + Value, + value::{Enumerator, Object}, +}; +use rand::Rng; +use serde::Serialize; + +use crate::{TemplateContext, context::SampleIdentifier}; + +#[derive(Debug)] +struct CaptchaConfig(mas_data_model::CaptchaConfig); + +impl Object for CaptchaConfig { + fn get_value(self: &Arc, key: &Value) -> Option { + match key.as_str() { + Some("service") => Some(match &self.0.service { + mas_data_model::CaptchaService::RecaptchaV2 => "recaptcha_v2".into(), + mas_data_model::CaptchaService::CloudflareTurnstile => { + "cloudflare_turnstile".into() + } + mas_data_model::CaptchaService::HCaptcha => "hcaptcha".into(), + }), + Some("site_key") => Some(self.0.site_key.clone().into()), + _ => None, + } + } + + fn enumerate(self: &Arc) -> Enumerator { + Enumerator::Str(&["service", "site_key"]) + } +} + +/// Context with an optional CAPTCHA configuration in it +#[derive(Serialize)] +pub struct WithCaptcha { + captcha: Option, + + #[serde(flatten)] + inner: T, +} + +impl WithCaptcha { + #[must_use] + pub(crate) fn new(captcha: Option, inner: T) -> Self { + Self { + captcha: captcha.map(|captcha| Value::from_object(CaptchaConfig(captcha))), + inner, + } + } +} + +impl TemplateContext for WithCaptcha { + fn sample( + now: chrono::DateTime, + rng: &mut R, + locales: &[DataLocale], + ) -> BTreeMap + where + Self: Sized, + { + T::sample(now, rng, locales) + .into_iter() + .map(|(k, inner)| (k, Self::new(None, inner))) + .collect() + } +} diff --git a/matrix-authentication-service/crates/templates/src/context/ext.rs b/matrix-authentication-service/crates/templates/src/context/ext.rs new file mode 100644 index 00000000..679ad91a --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/context/ext.rs @@ -0,0 +1,54 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use mas_data_model::SiteConfig; + +use super::{SiteBranding, SiteFeatures}; + +mod private { + pub trait Sealed {} + impl Sealed for mas_data_model::SiteConfig {} +} + +/// Extension trait for [`SiteConfig`] to construct [`SiteBranding`] and +/// [`SiteFeatures`] from it. +pub trait SiteConfigExt: private::Sealed { + /// Construct a [`SiteBranding`] from the [`SiteConfig`]. + fn templates_branding(&self) -> SiteBranding; + + /// Construct a [`SiteFeatures`] from the [`SiteConfig`]. + fn templates_features(&self) -> SiteFeatures; +} + +impl SiteConfigExt for SiteConfig { + fn templates_branding(&self) -> SiteBranding { + let mut branding = SiteBranding::new(self.server_name.clone()); + + if let Some(policy_uri) = &self.policy_uri { + branding = branding.with_policy_uri(policy_uri.as_str()); + } + + if let Some(tos_uri) = &self.tos_uri { + branding = branding.with_tos_uri(tos_uri.as_str()); + } + + if let Some(imprint) = &self.imprint { + branding = branding.with_imprint(imprint.as_str()); + } + + branding + } + + fn templates_features(&self) -> SiteFeatures { + SiteFeatures { + password_registration: self.password_registration_enabled, + password_registration_email_required: self.password_registration_email_required, + password_login: self.password_login_enabled, + account_recovery: self.account_recovery_allowed, + login_with_email_allowed: self.login_with_email_allowed, + } + } +} diff --git a/matrix-authentication-service/crates/templates/src/context/features.rs b/matrix-authentication-service/crates/templates/src/context/features.rs new file mode 100644 index 00000000..07e80f70 --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/context/features.rs @@ -0,0 +1,57 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::sync::Arc; + +use minijinja::{ + Value, + value::{Enumerator, Object}, +}; + +/// Site features information. +#[allow(clippy::struct_excessive_bools)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct SiteFeatures { + /// Whether local password-based registration is enabled. + pub password_registration: bool, + + /// Whether local password-based registration requires an email address. + pub password_registration_email_required: bool, + + /// Whether local password-based login is enabled. + pub password_login: bool, + + /// Whether email-based account recovery is enabled. + pub account_recovery: bool, + + /// Whether users can log in with their email address. + pub login_with_email_allowed: bool, +} + +impl Object for SiteFeatures { + fn get_value(self: &Arc, field: &Value) -> Option { + match field.as_str()? { + "password_registration" => Some(Value::from(self.password_registration)), + "password_registration_email_required" => { + Some(Value::from(self.password_registration_email_required)) + } + "password_login" => Some(Value::from(self.password_login)), + "account_recovery" => Some(Value::from(self.account_recovery)), + "login_with_email_allowed" => Some(Value::from(self.login_with_email_allowed)), + _ => None, + } + } + + fn enumerate(self: &Arc) -> Enumerator { + Enumerator::Str(&[ + "password_registration", + "password_registration_email_required", + "password_login", + "account_recovery", + "login_with_email_allowed", + ]) + } +} diff --git a/matrix-authentication-service/crates/templates/src/forms.rs b/matrix-authentication-service/crates/templates/src/forms.rs new file mode 100644 index 00000000..94e20be3 --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/forms.rs @@ -0,0 +1,291 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::{collections::HashMap, hash::Hash}; + +use serde::{Deserialize, Serialize}; + +/// A trait which should be used for form field enums +pub trait FormField: Copy + Hash + PartialEq + Eq + Serialize + for<'de> Deserialize<'de> { + /// Return false for fields where values should not be kept (e.g. password + /// fields) + fn keep(&self) -> bool; +} + +/// An error on a form field +#[derive(Debug, Serialize)] +#[serde(rename_all = "snake_case", tag = "kind")] +pub enum FieldError { + /// A required field is missing + Required, + + /// An unspecified error on the field + Unspecified, + + /// Invalid value for this field + Invalid, + + /// The password confirmation doesn't match the password + PasswordMismatch, + + /// That value already exists + Exists, + + /// Denied by the policy + Policy { + /// Well-known policy code + code: Option<&'static str>, + + /// Message for this policy violation + message: String, + }, +} + +/// An error on the whole form +#[derive(Debug, Serialize)] +#[serde(rename_all = "snake_case", tag = "kind")] +pub enum FormError { + /// The given credentials are not valid + InvalidCredentials, + + /// Password fields don't match + PasswordMismatch, + + /// There was an internal error + Internal, + + /// Rate limit exceeded + RateLimitExceeded, + + /// Denied by the policy + Policy { + /// Well-known policy code + code: Option<&'static str>, + + /// Message for this policy violation + message: String, + }, + + /// Failed to validate CAPTCHA + Captcha, +} + +#[derive(Debug, Default, Serialize)] +struct FieldState { + value: Option, + errors: Vec, +} + +/// The state of a form and its fields +#[derive(Debug, Serialize)] +pub struct FormState { + fields: HashMap, + errors: Vec, + + #[serde(skip)] + has_errors: bool, +} + +impl Default for 
FormState { + fn default() -> Self { + FormState { + fields: HashMap::default(), + errors: Vec::default(), + has_errors: false, + } + } +} + +#[derive(Deserialize, PartialEq, Eq, Hash)] +#[serde(untagged)] +enum KeyOrOther { + Key(K), + Other(String), +} + +impl KeyOrOther { + fn key(self) -> Option { + match self { + Self::Key(key) => Some(key), + Self::Other(_) => None, + } + } +} + +impl FormState { + /// Generate a [`FormState`] out of a form + /// + /// # Panics + /// + /// If the form fails to serialize, or the form field keys fail to + /// deserialize + pub fn from_form(form: &F) -> Self { + let form = serde_json::to_value(form).unwrap(); + let fields: HashMap, Option> = serde_json::from_value(form).unwrap(); + + let fields = fields + .into_iter() + .filter_map(|(key, value)| { + let key = key.key()?; + let value = key.keep().then_some(value).flatten(); + let field = FieldState { + value, + errors: Vec::new(), + }; + Some((key, field)) + }) + .collect(); + + FormState { + fields, + errors: Vec::new(), + has_errors: false, + } + } + + /// Add an error on a form field + pub fn add_error_on_field(&mut self, field: K, error: FieldError) { + self.fields.entry(field).or_default().errors.push(error); + self.has_errors = true; + } + + /// Add an error on a form field + #[must_use] + pub fn with_error_on_field(mut self, field: K, error: FieldError) -> Self { + self.add_error_on_field(field, error); + self + } + + /// Add an error on the form + pub fn add_error_on_form(&mut self, error: FormError) { + self.errors.push(error); + self.has_errors = true; + } + + /// Add an error on the form + #[must_use] + pub fn with_error_on_form(mut self, error: FormError) -> Self { + self.add_error_on_form(error); + self + } + + /// Set a value on the form + pub fn set_value(&mut self, field: K, value: Option) { + self.fields.entry(field).or_default().value = value; + } + + /// Checks if a field contains a value + pub fn has_value(&self, field: K) -> bool { + 
self.fields.get(&field).is_some_and(|f| f.value.is_some()) + } + + /// Returns `true` if the form has no error attached to it + #[must_use] + pub fn is_valid(&self) -> bool { + !self.has_errors + } +} + +/// Utility trait to help creating [`FormState`] out of a form +pub trait ToFormState: Serialize { + /// The enum used for field names + type Field: FormField; + + /// Generate a [`FormState`] out of [`Self`] + /// + /// # Panics + /// + /// If the form fails to serialize or [`Self::Field`] fails to deserialize + fn to_form_state(&self) -> FormState { + FormState::from_form(&self) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[derive(Serialize)] + struct TestForm { + foo: String, + bar: String, + } + + #[derive(Serialize, Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] + #[serde(rename_all = "snake_case")] + enum TestFormField { + Foo, + Bar, + } + + impl FormField for TestFormField { + fn keep(&self) -> bool { + match self { + Self::Foo => true, + Self::Bar => false, + } + } + } + + impl ToFormState for TestForm { + type Field = TestFormField; + } + + #[test] + fn form_state_serialization() { + let form = TestForm { + foo: "john".to_owned(), + bar: "hunter2".to_owned(), + }; + + let state = form.to_form_state(); + let state = serde_json::to_value(state).unwrap(); + assert_eq!( + state, + serde_json::json!({ + "errors": [], + "fields": { + "foo": { + "errors": [], + "value": "john", + }, + "bar": { + "errors": [], + "value": null + }, + } + }) + ); + + let form = TestForm { + foo: String::new(), + bar: String::new(), + }; + let state = form + .to_form_state() + .with_error_on_field(TestFormField::Foo, FieldError::Required) + .with_error_on_field(TestFormField::Bar, FieldError::Required) + .with_error_on_form(FormError::InvalidCredentials); + + let state = serde_json::to_value(state).unwrap(); + assert_eq!( + state, + serde_json::json!({ + "errors": [{"kind": "invalid_credentials"}], + "fields": { + "foo": { + "errors": [{"kind": "required"}], + 
"value": "", + }, + "bar": { + "errors": [{"kind": "required"}], + "value": null + }, + } + }) + ); + } +} diff --git a/matrix-authentication-service/crates/templates/src/functions.rs b/matrix-authentication-service/crates/templates/src/functions.rs new file mode 100644 index 00000000..f860580d --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/functions.rs @@ -0,0 +1,658 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// This is needed to make the Environment::add* functions work +#![allow(clippy::needless_pass_by_value)] + +//! Additional functions, tests and filters used in templates + +use std::{ + collections::{BTreeMap, HashMap, HashSet}, + fmt::{Formatter, Write as _}, + str::FromStr, + sync::{Arc, Mutex, atomic::AtomicUsize}, +}; + +use camino::{Utf8Path, Utf8PathBuf}; +use mas_i18n::{Argument, ArgumentList, DataLocale, Translator, sprintf::FormattedMessagePart}; +use mas_router::UrlBuilder; +use mas_spa::ViteManifest; +use minijinja::{ + Error, ErrorKind, State, Value, escape_formatter, + machinery::make_string_output, + value::{Kwargs, Object, ViaDeserialize, from_args}, +}; +use url::Url; + +pub fn register( + env: &mut minijinja::Environment, + url_builder: UrlBuilder, + vite_manifest: Option, + translator: Arc, +) { + env.set_unknown_method_callback(minijinja_contrib::pycompat::unknown_method_callback); + + minijinja_contrib::add_to_environment(env); + env.add_test("empty", self::tester_empty); + env.add_filter("to_params", filter_to_params); + env.add_filter("simplify_url", filter_simplify_url); + env.add_filter("add_slashes", filter_add_slashes); + env.add_filter("parse_user_agent", filter_parse_user_agent); + env.add_filter("id_color_hash", filter_id_color_hash); + env.add_function("add_params_to_url", 
function_add_params_to_url); + env.add_function("counter", || Ok(Value::from_object(Counter::default()))); + if let Some(vite_manifest) = vite_manifest { + env.add_global( + "include_asset", + Value::from_object(IncludeAsset { + url_builder: url_builder.clone(), + vite_manifest, + }), + ); + } else { + env.add_global("include_asset", Value::from_object(FakeIncludeAsset {})); + } + env.add_global( + "translator", + Value::from_object(TranslatorFunc { translator }), + ); + env.add_filter("prefix_url", move |url: &str| -> String { + if !url.starts_with('/') { + // Let's assume it's not an internal URL and return it as-is + return url.to_owned(); + } + + let Some(prefix) = url_builder.prefix() else { + // If there is no prefix to add, return the URL as-is + return url.to_owned(); + }; + + format!("{prefix}{url}") + }); +} + +fn tester_empty(seq: Value) -> bool { + seq.len() == Some(0) +} + +fn filter_add_slashes(value: &str) -> String { + value + .replace('\\', "\\\\") + .replace('\"', "\\\"") + .replace('\'', "\\\'") +} + +fn filter_to_params(params: &Value, kwargs: Kwargs) -> Result { + let params = serde_urlencoded::to_string(params).map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + "Could not serialize parameters", + ) + .with_source(e) + })?; + + let prefix = kwargs.get("prefix").unwrap_or(""); + kwargs.assert_all_used()?; + + if params.is_empty() { + Ok(String::new()) + } else { + Ok(format!("{prefix}{params}")) + } +} + +/// Filter which simplifies a URL to its domain name for HTTP(S) URLs +fn filter_simplify_url(url: &str, kwargs: Kwargs) -> Result { + // Do nothing if the URL is not valid + let Ok(mut url) = Url::from_str(url) else { + return Ok(url.to_owned()); + }; + + // Always at least remove the query parameters and fragment + url.set_query(None); + url.set_fragment(None); + + // Do nothing else for non-HTTPS URLs + if url.scheme() != "https" { + return Ok(url.to_string()); + } + + let keep_path = 
kwargs.get::>("keep_path")?.unwrap_or_default(); + kwargs.assert_all_used()?; + + // Only return the domain name + let Some(domain) = url.domain() else { + return Ok(url.to_string()); + }; + + if keep_path { + Ok(format!( + "{domain}{path}", + domain = domain, + path = url.path(), + )) + } else { + Ok(domain.to_owned()) + } +} + +/// Filter which computes a hash between 1 and 6 of an input string, identitical +/// to compound-web's `useIdColorHash` +fn filter_id_color_hash(input: &str) -> u32 { + input.chars().fold(0, |hash, c| hash + c as u32) % 6 + 1 +} + +/// Filter which parses a user-agent string +fn filter_parse_user_agent(user_agent: String) -> Value { + let user_agent = mas_data_model::UserAgent::parse(user_agent); + Value::from_serialize(user_agent) +} + +enum ParamsWhere { + Fragment, + Query, +} + +fn function_add_params_to_url( + uri: ViaDeserialize, + mode: &str, + params: ViaDeserialize>, +) -> Result { + use ParamsWhere::{Fragment, Query}; + + let mode = match mode { + "fragment" => Fragment, + "query" => Query, + _ => { + return Err(Error::new( + ErrorKind::InvalidOperation, + "Invalid `mode` parameter", + )); + } + }; + + // First, get the `uri`, `mode` and `params` parameters + // Get the relevant part of the URI and parse for existing parameters + let existing = match mode { + Fragment => uri.fragment(), + Query => uri.query(), + }; + let existing: HashMap = existing + .map(serde_urlencoded::from_str) + .transpose() + .map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + "Could not parse existing `uri` parameters", + ) + .with_source(e) + })? 
+ .unwrap_or_default(); + + // Merge the exising and the additional parameters together + // Use a BTreeMap for determinism (because it orders keys) + let params: BTreeMap<&String, &Value> = params.iter().chain(existing.iter()).collect(); + + // Transform them back to urlencoded + let params = serde_urlencoded::to_string(params).map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + "Could not serialize back parameters", + ) + .with_source(e) + })?; + + let uri = { + let mut uri = uri; + match mode { + Fragment => uri.set_fragment(Some(¶ms)), + Query => uri.set_query(Some(¶ms)), + } + uri + }; + + Ok(uri.to_string()) +} + +struct TranslatorFunc { + translator: Arc, +} + +impl std::fmt::Debug for TranslatorFunc { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TranslatorFunc") + .field("translator", &"..") + .finish() + } +} + +impl std::fmt::Display for TranslatorFunc { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("translator") + } +} + +impl Object for TranslatorFunc { + fn call(self: &Arc, _state: &State, args: &[Value]) -> Result { + let (lang,): (&str,) = from_args(args)?; + + let lang: DataLocale = lang.parse().map_err(|e| { + Error::new(ErrorKind::InvalidOperation, "Invalid language").with_source(e) + })?; + + Ok(Value::from_object(TranslateFunc { + lang, + translator: Arc::clone(&self.translator), + })) + } +} + +struct TranslateFunc { + translator: Arc, + lang: DataLocale, +} + +impl std::fmt::Debug for TranslateFunc { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Translate") + .field("translator", &"..") + .field("lang", &self.lang) + .finish() + } +} + +impl std::fmt::Display for TranslateFunc { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("translate") + } +} + +impl Object for TranslateFunc { + fn call(self: &Arc, state: &State, args: &[Value]) -> Result { + let (key, kwargs): (&str, Kwargs) = 
from_args(args)?; + + let (message, _locale) = if let Some(count) = kwargs.get("count")? { + self.translator + .plural_with_fallback(self.lang.clone(), key, count) + .ok_or(Error::new( + ErrorKind::InvalidOperation, + "Missing translation", + ))? + } else { + self.translator + .message_with_fallback(self.lang.clone(), key) + .ok_or(Error::new( + ErrorKind::InvalidOperation, + "Missing translation", + ))? + }; + + let res: Result = kwargs + .args() + .map(|name| { + let value: Value = kwargs.get(name)?; + let value = serde_json::to_value(value).map_err(|e| { + Error::new(ErrorKind::InvalidOperation, "Could not serialize argument") + .with_source(e) + })?; + + Ok::<_, Error>(Argument::named(name.to_owned(), value)) + }) + .collect(); + let list = res?; + + let formatted = message.format_(&list).map_err(|e| { + Error::new(ErrorKind::InvalidOperation, "Could not format message").with_source(e) + })?; + + let mut buf = String::with_capacity(formatted.len()); + let mut output = make_string_output(&mut buf); + for part in formatted.parts() { + match part { + FormattedMessagePart::Text(text) => { + // Literal text, just write it + output.write_str(text)?; + } + FormattedMessagePart::Placeholder(placeholder) => { + // Placeholder, escape it + escape_formatter(&mut output, state, &placeholder.as_str().into())?; + } + } + } + + Ok(Value::from_safe_string(buf)) + } + + fn call_method( + self: &Arc, + _state: &State, + name: &str, + args: &[Value], + ) -> Result { + match name { + "relative_date" => { + let (date,): (String,) = from_args(args)?; + let date: chrono::DateTime = date.parse().map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + "Invalid date while calling function `relative_date`", + ) + .with_source(e) + })?; + + // TODO: grab the clock somewhere + #[allow(clippy::disallowed_methods)] + let now = chrono::Utc::now(); + + let diff = (date - now).num_days(); + + Ok(Value::from( + self.translator + .relative_date(&self.lang, diff) + .map_err(|_e| { + 
Error::new( + ErrorKind::InvalidOperation, + "Failed to format relative date", + ) + })?, + )) + } + + "short_time" => { + let (date,): (String,) = from_args(args)?; + let date: chrono::DateTime = date.parse().map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + "Invalid date while calling function `time`", + ) + .with_source(e) + })?; + + // TODO: we should use the user's timezone here + let time = date.time(); + + Ok(Value::from( + self.translator + .short_time(&self.lang, &TimeAdapter(time)) + .map_err(|_e| { + Error::new(ErrorKind::InvalidOperation, "Failed to format time") + })?, + )) + } + + _ => Err(Error::new( + ErrorKind::InvalidOperation, + "Invalid method on include_asset", + )), + } + } +} + +/// An adapter to make a [`Timelike`] implement [`IsoTimeInput`] +/// +/// [`Timelike`]: chrono::Timelike +/// [`IsoTimeInput`]: mas_i18n::icu_datetime::input::IsoTimeInput +struct TimeAdapter(T); + +impl mas_i18n::icu_datetime::input::IsoTimeInput for TimeAdapter { + fn hour(&self) -> Option { + let hour: usize = chrono::Timelike::hour(&self.0).try_into().ok()?; + hour.try_into().ok() + } + + fn minute(&self) -> Option { + let minute: usize = chrono::Timelike::minute(&self.0).try_into().ok()?; + minute.try_into().ok() + } + + fn second(&self) -> Option { + let second: usize = chrono::Timelike::second(&self.0).try_into().ok()?; + second.try_into().ok() + } + + fn nanosecond(&self) -> Option { + let nanosecond: usize = chrono::Timelike::nanosecond(&self.0).try_into().ok()?; + nanosecond.try_into().ok() + } +} + +#[derive(Default, Debug)] +struct IncludedAssetsTrackerInner { + preloaded: HashSet, + included: HashSet, +} + +impl IncludedAssetsTrackerInner { + /// Mark an asset as preloaded. Returns true if it was not already marked. + fn mark_preloaded(&mut self, asset: &Utf8Path) -> bool { + self.preloaded.insert(asset.to_owned()) + } + + /// Mark an asset as included. Returns true if it was not already marked. 
+ fn mark_included(&mut self, asset: &Utf8Path) -> bool { + self.preloaded.insert(asset.to_owned()); + self.included.insert(asset.to_owned()) + } +} + +/// Helper to track included assets during a template render +#[derive(Default, Debug)] +struct IncludedAssetsTracker { + inner: Mutex, +} + +impl IncludedAssetsTracker { + fn lock(&self) -> std::sync::MutexGuard<'_, IncludedAssetsTrackerInner> { + // There is no reason for this mutex to ever get poisoned, so it's fine + // to unwrap here + self.inner.lock().unwrap() + } +} + +impl Object for IncludedAssetsTracker {} + +struct IncludeAsset { + url_builder: UrlBuilder, + vite_manifest: ViteManifest, +} + +impl std::fmt::Debug for IncludeAsset { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("IncludeAsset") + .field("url_builder", &self.url_builder.assets_base()) + .field("vite_manifest", &"..") + .finish() + } +} + +impl std::fmt::Display for IncludeAsset { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("include_asset") + } +} + +impl Object for IncludeAsset { + fn call(self: &Arc, state: &State, args: &[Value]) -> Result { + let (path,): (&str,) = from_args(args)?; + let path: &Utf8Path = path.into(); + + let assets_base: &Utf8Path = self.url_builder.assets_base().into(); + + // We store the list of assets we've already included and already preloaded in a + // 'temp' object. Those live throughout the template render and reset on each + // new render. 
+ let tracker = + state.get_or_set_temp_object("included_assets_tracker", IncludedAssetsTracker::default); + let mut tracker = tracker.lock(); + + // Grab the main asset and its imports from the manifest + let (main, imported) = self.vite_manifest.find_assets(path).map_err(|e| { + Error::new( + ErrorKind::InvalidOperation, + format!("Invalid assets manifest while calling function `include_asset` with path = {path:?}: {e}"), + ) + })?; + + // We'll accumulate the output in this string + let mut output = String::new(); + match main.file_type() { + mas_spa::FileType::Script => { + let integrity = main.integrity_attr(); + let src = main.src(assets_base); + if tracker.mark_included(&src) { + writeln!( + output, + r#""# + ) + .unwrap(); + } + } + mas_spa::FileType::Stylesheet => { + let integrity = main.integrity_attr(); + let src = main.src(assets_base); + if tracker.mark_included(&src) { + writeln!( + output, + r#""# + ) + .unwrap(); + } + } + + mas_spa::FileType::Json => { + // When a JSON is included at the top level (a translation), we preload it + let src = main.src(assets_base); + if tracker.mark_preloaded(&src) { + writeln!(output, r#""#,).unwrap(); + } + } + + file_type => { + return Err(Error::new( + ErrorKind::InvalidOperation, + format!( + "The target asset is a {file_type:?} file, which is not supported by `include_asset`" + ), + )); + } + } + + for asset in imported { + let src = asset.src(assets_base); + match asset.file_type() { + mas_spa::FileType::Stylesheet => { + // Imported stylesheets are inserted directly, not just preloaded + if tracker.mark_included(&src) { + let integrity = asset.integrity_attr(); + writeln!( + output, + r#""# + ) + .unwrap(); + } + } + mas_spa::FileType::Script => { + if tracker.mark_preloaded(&src) { + let integrity = asset.integrity_attr(); + writeln!( + output, + r#""#, + ) + .unwrap(); + } + } + mas_spa::FileType::Png => { + if tracker.mark_preloaded(&src) { + writeln!( + output, + r#""#, + ) + .unwrap(); + } + } + 
mas_spa::FileType::Woff | mas_spa::FileType::Woff2 | mas_spa::FileType::Json => { + // Skip pre-loading fonts and JSON (translations) as it will + // lead to many wasted preloads. For translations, we only + // include them as preload if they are included on the + // top-level + } + } + } + + Ok(Value::from_safe_string(output.trim_end().to_owned())) + } +} + +struct FakeIncludeAsset {} + +impl std::fmt::Debug for FakeIncludeAsset { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("FakeIncludeAsset").finish() + } +} + +impl std::fmt::Display for FakeIncludeAsset { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("fake_include_asset") + } +} + +impl Object for FakeIncludeAsset { + fn call(self: &Arc, _state: &State, args: &[Value]) -> Result { + let (path,): (&str,) = from_args(args)?; + + Ok(Value::from_safe_string(format!( + "" + ))) + } +} + +#[derive(Debug, Default)] +struct Counter { + count: AtomicUsize, +} + +impl std::fmt::Display for Counter { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + self.count.load(std::sync::atomic::Ordering::Relaxed) + ) + } +} + +impl Object for Counter { + fn call_method( + self: &Arc, + _state: &State, + name: &str, + args: &[Value], + ) -> Result { + // None of the methods take any arguments + from_args::<()>(args)?; + + match name { + "reset" => { + self.count.store(0, std::sync::atomic::Ordering::Relaxed); + Ok(Value::UNDEFINED) + } + "next" => { + let old = self + .count + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + Ok(Value::from(old)) + } + "peek" => Ok(Value::from( + self.count.load(std::sync::atomic::Ordering::Relaxed), + )), + _ => Err(Error::new( + ErrorKind::InvalidOperation, + "Invalid method on counter", + )), + } + } +} diff --git a/matrix-authentication-service/crates/templates/src/lib.rs b/matrix-authentication-service/crates/templates/src/lib.rs new file mode 100644 index 00000000..60b90555 
--- /dev/null +++ b/matrix-authentication-service/crates/templates/src/lib.rs @@ -0,0 +1,546 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +#![deny(missing_docs)] +#![allow(clippy::module_name_repetitions)] + +//! Templates rendering + +use std::{ + collections::{BTreeMap, HashSet}, + sync::Arc, +}; + +use anyhow::Context as _; +use arc_swap::ArcSwap; +use camino::{Utf8Path, Utf8PathBuf}; +use mas_i18n::Translator; +use mas_router::UrlBuilder; +use mas_spa::ViteManifest; +use minijinja::{UndefinedBehavior, Value}; +use rand::Rng; +use serde::Serialize; +use thiserror::Error; +use tokio::task::JoinError; +use tracing::{debug, info}; +use walkdir::DirEntry; + +mod context; +mod forms; +mod functions; + +#[macro_use] +mod macros; + +pub use self::{ + context::{ + AccountInactiveContext, ApiDocContext, AppContext, CompatLoginPolicyViolationContext, + CompatSsoContext, ConsentContext, DeviceConsentContext, DeviceLinkContext, + DeviceLinkFormField, DeviceNameContext, EmailRecoveryContext, EmailVerificationContext, + EmptyContext, ErrorContext, FormPostContext, IndexContext, LoginContext, LoginFormField, + NotFoundContext, PasswordRegisterContext, PolicyViolationContext, PostAuthContext, + PostAuthContextInner, RecoveryExpiredContext, RecoveryFinishContext, + RecoveryFinishFormField, RecoveryProgressContext, RecoveryStartContext, + RecoveryStartFormField, RegisterContext, RegisterFormField, + RegisterStepsDisplayNameContext, RegisterStepsDisplayNameFormField, + RegisterStepsEmailInUseContext, RegisterStepsRegistrationTokenContext, + RegisterStepsRegistrationTokenFormField, RegisterStepsVerifyEmailContext, + RegisterStepsVerifyEmailFormField, SiteBranding, SiteConfigExt, SiteFeatures, + TemplateContext, UpstreamExistingLinkContext, UpstreamRegister, 
UpstreamRegisterFormField, + UpstreamSuggestLink, WithCaptcha, WithCsrf, WithLanguage, WithOptionalSession, WithSession, + }, + forms::{FieldError, FormError, FormField, FormState, ToFormState}, +}; +use crate::context::SampleIdentifier; + +/// Escape the given string for use in HTML +/// +/// It uses the same crate as the one used by the minijinja templates +#[must_use] +pub fn escape_html(input: &str) -> String { + v_htmlescape::escape(input).to_string() +} + +/// Wrapper around [`minijinja::Environment`] helping rendering the various +/// templates +#[derive(Debug, Clone)] +pub struct Templates { + environment: Arc>>, + translator: Arc>, + url_builder: UrlBuilder, + branding: SiteBranding, + features: SiteFeatures, + vite_manifest_path: Option, + translations_path: Utf8PathBuf, + path: Utf8PathBuf, + /// Whether template rendering is in strict mode (for testing, + /// until this can be rolled out in production.) + strict: bool, +} + +/// There was an issue while loading the templates +#[derive(Error, Debug)] +pub enum TemplateLoadingError { + /// I/O error + #[error(transparent)] + IO(#[from] std::io::Error), + + /// Failed to read the assets manifest + #[error("failed to read the assets manifest")] + ViteManifestIO(#[source] std::io::Error), + + /// Failed to deserialize the assets manifest + #[error("invalid assets manifest")] + ViteManifest(#[from] serde_json::Error), + + /// Failed to load the translations + #[error("failed to load the translations")] + Translations(#[from] mas_i18n::LoadError), + + /// Failed to traverse the filesystem + #[error("failed to traverse the filesystem")] + WalkDir(#[from] walkdir::Error), + + /// Encountered non-UTF-8 path + #[error("encountered non-UTF-8 path")] + NonUtf8Path(#[from] camino::FromPathError), + + /// Encountered non-UTF-8 path + #[error("encountered non-UTF-8 path")] + NonUtf8PathBuf(#[from] camino::FromPathBufError), + + /// Encountered invalid path + #[error("encountered invalid path")] + InvalidPath(#[from] 
std::path::StripPrefixError), + + /// Some templates failed to compile + #[error("could not load and compile some templates")] + Compile(#[from] minijinja::Error), + + /// Could not join blocking task + #[error("error from async runtime")] + Runtime(#[from] JoinError), + + /// There are essential templates missing + #[error("missing templates {missing:?}")] + MissingTemplates { + /// List of missing templates + missing: HashSet, + /// List of templates that were loaded + loaded: HashSet, + }, +} + +fn is_hidden(entry: &DirEntry) -> bool { + entry + .file_name() + .to_str() + .is_some_and(|s| s.starts_with('.')) +} + +impl Templates { + /// Load the templates from the given config + /// + /// # Parameters + /// + /// - `vite_manifest_path`: None if we are rendering resources for + /// reproducibility, in which case a dummy Vite manifest will be used. + /// + /// # Errors + /// + /// Returns an error if the templates could not be loaded from disk. + #[tracing::instrument( + name = "templates.load", + skip_all, + fields(%path), + )] + pub async fn load( + path: Utf8PathBuf, + url_builder: UrlBuilder, + vite_manifest_path: Option, + translations_path: Utf8PathBuf, + branding: SiteBranding, + features: SiteFeatures, + strict: bool, + ) -> Result { + let (translator, environment) = Self::load_( + &path, + url_builder.clone(), + vite_manifest_path.as_deref(), + &translations_path, + branding.clone(), + features, + strict, + ) + .await?; + Ok(Self { + environment: Arc::new(ArcSwap::new(environment)), + translator: Arc::new(ArcSwap::new(translator)), + path, + url_builder, + vite_manifest_path, + translations_path, + branding, + features, + strict, + }) + } + + async fn load_( + path: &Utf8Path, + url_builder: UrlBuilder, + vite_manifest_path: Option<&Utf8Path>, + translations_path: &Utf8Path, + branding: SiteBranding, + features: SiteFeatures, + strict: bool, + ) -> Result<(Arc, Arc>), TemplateLoadingError> { + let path = path.to_owned(); + let span = 
tracing::Span::current(); + + // Read the assets manifest from disk + let vite_manifest = if let Some(vite_manifest_path) = vite_manifest_path { + let raw_vite_manifest = tokio::fs::read(vite_manifest_path) + .await + .map_err(TemplateLoadingError::ViteManifestIO)?; + + Some( + serde_json::from_slice::(&raw_vite_manifest) + .map_err(TemplateLoadingError::ViteManifest)?, + ) + } else { + None + }; + + // Parse it + + let translations_path = translations_path.to_owned(); + let translator = + tokio::task::spawn_blocking(move || Translator::load_from_path(&translations_path)) + .await??; + let translator = Arc::new(translator); + + debug!(locales = ?translator.available_locales(), "Loaded translations"); + + let (loaded, mut env) = tokio::task::spawn_blocking(move || { + span.in_scope(move || { + let mut loaded: HashSet<_> = HashSet::new(); + let mut env = minijinja::Environment::new(); + // Don't allow use of undefined variables + env.set_undefined_behavior(if strict { + UndefinedBehavior::Strict + } else { + // For now, allow semi-strict, because we don't have total test coverage of + // tests and some tests rely on if conditions against sometimes-undefined + // variables + UndefinedBehavior::SemiStrict + }); + let root = path.canonicalize_utf8()?; + info!(%root, "Loading templates from filesystem"); + for entry in walkdir::WalkDir::new(&root) + .min_depth(1) + .into_iter() + .filter_entry(|e| !is_hidden(e)) + { + let entry = entry?; + if entry.file_type().is_file() { + let path = Utf8PathBuf::try_from(entry.into_path())?; + let Some(ext) = path.extension() else { + continue; + }; + + if ext == "html" || ext == "txt" || ext == "subject" { + let relative = path.strip_prefix(&root)?; + debug!(%relative, "Registering template"); + let template = std::fs::read_to_string(&path)?; + env.add_template_owned(relative.as_str().to_owned(), template)?; + loaded.insert(relative.as_str().to_owned()); + } + } + } + + Ok::<_, TemplateLoadingError>((loaded, env)) + }) + }) + 
.await??; + + env.add_global("branding", Value::from_object(branding)); + env.add_global("features", Value::from_object(features)); + + self::functions::register( + &mut env, + url_builder, + vite_manifest, + Arc::clone(&translator), + ); + + let env = Arc::new(env); + + let needed: HashSet<_> = TEMPLATES.into_iter().map(ToOwned::to_owned).collect(); + debug!(?loaded, ?needed, "Templates loaded"); + let missing: HashSet<_> = needed.difference(&loaded).cloned().collect(); + + if missing.is_empty() { + Ok((translator, env)) + } else { + Err(TemplateLoadingError::MissingTemplates { missing, loaded }) + } + } + + /// Reload the templates on disk + /// + /// # Errors + /// + /// Returns an error if the templates could not be reloaded from disk. + #[tracing::instrument( + name = "templates.reload", + skip_all, + fields(path = %self.path), + )] + pub async fn reload(&self) -> Result<(), TemplateLoadingError> { + let (translator, environment) = Self::load_( + &self.path, + self.url_builder.clone(), + self.vite_manifest_path.as_deref(), + &self.translations_path, + self.branding.clone(), + self.features, + self.strict, + ) + .await?; + + // Swap them + self.environment.store(environment); + self.translator.store(translator); + + Ok(()) + } + + /// Get the translator + #[must_use] + pub fn translator(&self) -> Arc { + self.translator.load_full() + } +} + +/// Failed to render a template +#[derive(Error, Debug)] +pub enum TemplateError { + /// Missing template + #[error("missing template {template:?}")] + Missing { + /// The name of the template being rendered + template: &'static str, + + /// The underlying error + #[source] + source: minijinja::Error, + }, + + /// Failed to render the template + #[error("could not render template {template:?}")] + Render { + /// The name of the template being rendered + template: &'static str, + + /// The underlying error + #[source] + source: minijinja::Error, + }, +} + +register_templates! 
{ + /// Render the not found fallback page + pub fn render_not_found(WithLanguage) { "pages/404.html" } + + /// Render the frontend app + pub fn render_app(WithLanguage) { "app.html" } + + /// Render the Swagger API reference + pub fn render_swagger(ApiDocContext) { "swagger/doc.html" } + + /// Render the Swagger OAuth callback page + pub fn render_swagger_callback(ApiDocContext) { "swagger/oauth2-redirect.html" } + + /// Render the login page + pub fn render_login(WithLanguage>) { "pages/login.html" } + + /// Render the registration page + pub fn render_register(WithLanguage>) { "pages/register/index.html" } + + /// Render the password registration page + pub fn render_password_register(WithLanguage>>) { "pages/register/password.html" } + + /// Render the email verification page + pub fn render_register_steps_verify_email(WithLanguage>) { "pages/register/steps/verify_email.html" } + + /// Render the email in use page + pub fn render_register_steps_email_in_use(WithLanguage) { "pages/register/steps/email_in_use.html" } + + /// Render the display name page + pub fn render_register_steps_display_name(WithLanguage>) { "pages/register/steps/display_name.html" } + + /// Render the registration token page + pub fn render_register_steps_registration_token(WithLanguage>) { "pages/register/steps/registration_token.html" } + + /// Render the client consent page + pub fn render_consent(WithLanguage>>) { "pages/consent.html" } + + /// Render the policy violation page + pub fn render_policy_violation(WithLanguage>>) { "pages/policy_violation.html" } + + /// Render the compatibility login policy violation page + pub fn render_compat_login_policy_violation(WithLanguage>>) { "pages/compat_login_policy_violation.html" } + + /// Render the legacy SSO login consent page + pub fn render_sso_login(WithLanguage>>) { "pages/sso.html" } + + /// Render the home page + pub fn render_index(WithLanguage>>) { "pages/index.html" } + + /// Render the account recovery start page + pub fn 
render_recovery_start(WithLanguage>) { "pages/recovery/start.html" } + + /// Render the account recovery start page + pub fn render_recovery_progress(WithLanguage>) { "pages/recovery/progress.html" } + + /// Render the account recovery finish page + pub fn render_recovery_finish(WithLanguage>) { "pages/recovery/finish.html" } + + /// Render the account recovery link expired page + pub fn render_recovery_expired(WithLanguage>) { "pages/recovery/expired.html" } + + /// Render the account recovery link consumed page + pub fn render_recovery_consumed(WithLanguage) { "pages/recovery/consumed.html" } + + /// Render the account recovery disabled page + pub fn render_recovery_disabled(WithLanguage) { "pages/recovery/disabled.html" } + + /// Render the form used by the `form_post` response mode + pub fn render_form_post<#[sample(EmptyContext)] T: Serialize>(WithLanguage>) { "form_post.html" } + + /// Render the HTML error page + pub fn render_error(ErrorContext) { "pages/error.html" } + + /// Render the email recovery email (plain text variant) + pub fn render_email_recovery_txt(WithLanguage) { "emails/recovery.txt" } + + /// Render the email recovery email (HTML text variant) + pub fn render_email_recovery_html(WithLanguage) { "emails/recovery.html" } + + /// Render the email recovery subject + pub fn render_email_recovery_subject(WithLanguage) { "emails/recovery.subject" } + + /// Render the email verification email (plain text variant) + pub fn render_email_verification_txt(WithLanguage) { "emails/verification.txt" } + + /// Render the email verification email (HTML text variant) + pub fn render_email_verification_html(WithLanguage) { "emails/verification.html" } + + /// Render the email verification subject + pub fn render_email_verification_subject(WithLanguage) { "emails/verification.subject" } + + /// Render the upstream link mismatch message + pub fn render_upstream_oauth2_link_mismatch(WithLanguage>>) { "pages/upstream_oauth2/link_mismatch.html" } + + /// Render 
the upstream suggest link message + pub fn render_upstream_oauth2_suggest_link(WithLanguage>>) { "pages/upstream_oauth2/suggest_link.html" } + + /// Render the upstream register screen + pub fn render_upstream_oauth2_do_register(WithLanguage>) { "pages/upstream_oauth2/do_register.html" } + + /// Render the device code link page + pub fn render_device_link(WithLanguage) { "pages/device_link.html" } + + /// Render the device code consent page + pub fn render_device_consent(WithLanguage>>) { "pages/device_consent.html" } + + /// Render the 'account deactivated' page + pub fn render_account_deactivated(WithLanguage>) { "pages/account/deactivated.html" } + + /// Render the 'account locked' page + pub fn render_account_locked(WithLanguage>) { "pages/account/locked.html" } + + /// Render the 'account logged out' page + pub fn render_account_logged_out(WithLanguage>) { "pages/account/logged_out.html" } + + /// Render the automatic device name for OAuth 2.0 client + pub fn render_device_name(WithLanguage) { "device_name.txt" } +} + +impl Templates { + /// Render all templates with the generated samples to check if they render + /// properly. + /// + /// Returns the renders in a map whose keys are template names + /// and the values are lists of renders (according to the list + /// of samples). + /// Samples are stable across re-runs and can be used for + /// acceptance testing. 
+ /// + /// # Errors + /// + /// Returns an error if any of the templates fails to render + pub fn check_render( + &self, + now: chrono::DateTime, + rng: &R, + ) -> anyhow::Result> { + check::all(self, now, rng) + } +} + +#[cfg(test)] +mod tests { + use rand::SeedableRng; + + use super::*; + + #[tokio::test] + async fn check_builtin_templates() { + #[allow(clippy::disallowed_methods)] + let now = chrono::Utc::now(); + let rng = rand_chacha::ChaCha8Rng::from_seed([42; 32]); + + let path = Utf8Path::new(env!("CARGO_MANIFEST_DIR")).join("../../templates/"); + let url_builder = UrlBuilder::new("https://example.com/".parse().unwrap(), None, None); + let branding = SiteBranding::new("example.com"); + let features = SiteFeatures { + password_login: true, + password_registration: true, + password_registration_email_required: true, + account_recovery: true, + login_with_email_allowed: true, + }; + let vite_manifest_path = + Utf8Path::new(env!("CARGO_MANIFEST_DIR")).join("../../frontend/dist/manifest.json"); + let translations_path = + Utf8Path::new(env!("CARGO_MANIFEST_DIR")).join("../../translations"); + + for use_real_vite_manifest in [true, false] { + let templates = Templates::load( + path.clone(), + url_builder.clone(), + // Check both renders against the real vite manifest and the 'dummy' vite manifest + // used for reproducible renders. 
+ use_real_vite_manifest.then_some(vite_manifest_path.clone()), + translations_path.clone(), + branding.clone(), + features, + // Use strict mode in tests + true, + ) + .await + .unwrap(); + + // Check the renders are deterministic, when given the same rng + let render1 = templates.check_render(now, &rng).unwrap(); + let render2 = templates.check_render(now, &rng).unwrap(); + + assert_eq!(render1, render2); + } + } +} diff --git a/matrix-authentication-service/crates/templates/src/macros.rs b/matrix-authentication-service/crates/templates/src/macros.rs new file mode 100644 index 00000000..c6d5e930 --- /dev/null +++ b/matrix-authentication-service/crates/templates/src/macros.rs @@ -0,0 +1,128 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2021-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +/// Count the number of tokens. Used to have a fixed-sized array for the +/// templates list. +macro_rules! count { + () => (0_usize); + ( $x:tt $($xs:tt)* ) => (1_usize + count!($($xs)*)); +} + +/// Macro that helps generating helper function that renders a specific template +/// with a strongly-typed context. It also register the template in a static +/// array to help detecting missing templates at startup time. +/// +/// The syntax looks almost like a function to confuse syntax highlighter as +/// little as possible. +#[macro_export] +macro_rules! register_templates { + { + $( + extra = { $( $extra_template:expr ),* $(,)? }; + )? + + $( + // Match any attribute on the function, such as #[doc], #[allow(dead_code)], etc. + $( #[ $attr:meta ] )* + // The function name + pub fn $name:ident + // Optional list of generics. 
Taken from + // https://newbedev.com/rust-macro-accepting-type-with-generic-parameters + // For sample rendering, we also require a 'sample' generic parameter to be provided, + // using #[sample(Type)] attribute syntax + $(< $( #[sample( $generic_default:tt )] $lt:tt $( : $clt:tt $(+ $dlt:tt )* )? ),+ >)? + // Type of context taken by the template + ( $param:ty ) + { + // The name of the template file + $template:expr + } + )* + } => { + /// List of registered templates + static TEMPLATES: [&'static str; count!( $( $template )* )] = [ $( $template, )* ]; + + impl Templates { + $( + $(#[$attr])? + /// + /// # Errors + /// + /// Returns an error if the template fails to render. + pub fn $name + $(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? + (&self, context: &$param) + -> Result { + let ctx = ::minijinja::value::Value::from_serialize(context); + + let env = self.environment.load(); + let tmpl = env.get_template($template) + .map_err(|source| TemplateError::Missing { template: $template, source })?; + tmpl.render(ctx) + .map_err(|source| TemplateError::Render { template: $template, source }) + } + )* + } + + /// Helps rendering each template with sample data + pub mod check { + use super::*; + + /// Check and render all templates with all samples. + /// + /// Returns the sample renders. The keys in the map are the template names. + /// + /// # Errors + /// + /// Returns an error if any template fails to render with any of the sample. + pub(crate) fn all(templates: &Templates, now: chrono::DateTime, rng: &R) -> anyhow::Result<::std::collections::BTreeMap<(&'static str, SampleIdentifier), String>> { + let mut out = ::std::collections::BTreeMap::new(); + // TODO shouldn't the Rng be independent for each render? + $( + { + let mut rng = rng.clone(); + out.extend( + $name $(::< _ $( , $generic_default ),* >)? (templates, now, &mut rng)? 
+ .into_iter() + .map(|(sample_identifier, rendered)| (($template, sample_identifier), rendered)) + ); + } + )* + + Ok(out) + } + + $( + #[doc = concat!("Render the `", $template, "` template with sample contexts")] + /// + /// Returns the sample renders. + /// + /// # Errors + /// + /// Returns an error if the template fails to render with any of the sample. + pub(crate) fn $name + < __R: Rng + Clone $( , $( $lt $( : $clt $(+ $dlt )* + TemplateContext )? ),+ )? > + (templates: &Templates, now: chrono::DateTime, rng: &mut __R) + -> anyhow::Result> { + let locales = templates.translator().available_locales(); + let samples: BTreeMap = TemplateContext::sample(now, rng, &locales); + + let name = $template; + let mut out = BTreeMap::new(); + for (sample_identifier, sample) in samples { + let context = serde_json::to_value(&sample)?; + ::tracing::info!(name, %context, "Rendering template"); + let rendered = templates. $name (&sample) + .with_context(|| format!("Failed to render sample template {name:?}-{sample_identifier:?} with context {context}"))?; + out.insert(sample_identifier, rendered); + } + + Ok(out) + } + )* + } + }; +} diff --git a/matrix-authentication-service/crates/tower/Cargo.toml b/matrix-authentication-service/crates/tower/Cargo.toml new file mode 100644 index 00000000..44a17c67 --- /dev/null +++ b/matrix-authentication-service/crates/tower/Cargo.toml @@ -0,0 +1,28 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +[package] +name = "mas-tower" +description = "Tower layers used by the Matrix Authentication Service" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +publish.workspace = true + +[lints] +workspace = true + +[dependencies] +http.workspace = true +opentelemetry-http.workspace = true +opentelemetry-semantic-conventions.workspace = true +opentelemetry.workspace = true +pin-project-lite.workspace = true +tower.workspace = true +tracing-opentelemetry.workspace = true +tracing.workspace = true diff --git a/matrix-authentication-service/crates/tower/src/lib.rs b/matrix-authentication-service/crates/tower/src/lib.rs new file mode 100644 index 00000000..72fd97c9 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/lib.rs @@ -0,0 +1,27 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +#![allow(clippy::module_name_repetitions)] + +use std::sync::LazyLock; + +use opentelemetry::metrics::Meter; + +mod metrics; +mod trace_context; +mod tracing; +mod utils; + +pub use self::{metrics::*, trace_context::*, tracing::*, utils::*}; + +static METER: LazyLock = LazyLock::new(|| { + let scope = opentelemetry::InstrumentationScope::builder(env!("CARGO_PKG_NAME")) + .with_version(env!("CARGO_PKG_VERSION")) + .with_schema_url(opentelemetry_semantic_conventions::SCHEMA_URL) + .build(); + + opentelemetry::global::meter_with_scope(scope) +}); diff --git a/matrix-authentication-service/crates/tower/src/metrics/duration.rs b/matrix-authentication-service/crates/tower/src/metrics/duration.rs new file mode 100644 index 00000000..d8f3cc52 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/metrics/duration.rs @@ -0,0 +1,227 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::time::Instant; + +use opentelemetry::{KeyValue, metrics::Histogram}; +use pin_project_lite::pin_project; +use tower::{Layer, Service}; + +use crate::{METER, MetricsAttributes, utils::FnWrapper}; + +/// A [`Layer`] that records the duration of requests in milliseconds. +#[derive(Clone, Debug)] +pub struct DurationRecorderLayer { + histogram: Histogram, + on_request: OnRequest, + on_response: OnResponse, + on_error: OnError, +} + +impl DurationRecorderLayer { + /// Create a new [`DurationRecorderLayer`]. + #[must_use] + pub fn new(name: &'static str) -> Self { + let histogram = METER.u64_histogram(name).build(); + Self { + histogram, + on_request: (), + on_response: (), + on_error: (), + } + } +} + +impl DurationRecorderLayer { + /// Set the [`MetricsAttributes`] to use on request. 
+ #[must_use] + pub fn on_request( + self, + on_request: NewOnRequest, + ) -> DurationRecorderLayer { + DurationRecorderLayer { + histogram: self.histogram, + on_request, + on_response: self.on_response, + on_error: self.on_error, + } + } + + #[must_use] + pub fn on_request_fn( + self, + on_request: F, + ) -> DurationRecorderLayer, OnResponse, OnError> + where + F: Fn(&T) -> Vec, + { + self.on_request(FnWrapper(on_request)) + } + + /// Set the [`MetricsAttributes`] to use on response. + #[must_use] + pub fn on_response( + self, + on_response: NewOnResponse, + ) -> DurationRecorderLayer { + DurationRecorderLayer { + histogram: self.histogram, + on_request: self.on_request, + on_response, + on_error: self.on_error, + } + } + + #[must_use] + pub fn on_response_fn( + self, + on_response: F, + ) -> DurationRecorderLayer, OnError> + where + F: Fn(&T) -> Vec, + { + self.on_response(FnWrapper(on_response)) + } + + /// Set the [`MetricsAttributes`] to use on error. + #[must_use] + pub fn on_error( + self, + on_error: NewOnError, + ) -> DurationRecorderLayer { + DurationRecorderLayer { + histogram: self.histogram, + on_request: self.on_request, + on_response: self.on_response, + on_error, + } + } + + #[must_use] + pub fn on_error_fn( + self, + on_error: F, + ) -> DurationRecorderLayer> + where + F: Fn(&T) -> Vec, + { + self.on_error(FnWrapper(on_error)) + } +} + +impl Layer + for DurationRecorderLayer +where + OnRequest: Clone, + OnResponse: Clone, + OnError: Clone, +{ + type Service = DurationRecorderService; + + fn layer(&self, inner: S) -> Self::Service { + DurationRecorderService { + inner, + histogram: self.histogram.clone(), + on_request: self.on_request.clone(), + on_response: self.on_response.clone(), + on_error: self.on_error.clone(), + } + } +} + +/// A middleware that records the duration of requests in milliseconds. 
+#[derive(Clone, Debug)] +pub struct DurationRecorderService { + inner: S, + histogram: Histogram, + on_request: OnRequest, + on_response: OnResponse, + on_error: OnError, +} + +pin_project! { + /// The future returned by the [`DurationRecorderService`]. + pub struct DurationRecorderFuture { + #[pin] + inner: F, + + start: Instant, + histogram: Histogram, + attributes_from_request: Vec, + from_response: OnResponse, + from_error: OnError, + } +} + +impl Future for DurationRecorderFuture +where + F: Future>, + OnResponse: MetricsAttributes, + OnError: MetricsAttributes, +{ + type Output = F::Output; + + fn poll( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.project(); + let result = std::task::ready!(this.inner.poll(cx)); + + // Measure the duration of the request. + let duration = this.start.elapsed(); + let duration_ms = duration.as_millis().try_into().unwrap_or(u64::MAX); + + // Collect the attributes from the request, response and error. 
+ let mut attributes = this.attributes_from_request.clone(); + match &result { + Ok(response) => { + attributes.extend(this.from_response.attributes(response)); + } + Err(error) => { + attributes.extend(this.from_error.attributes(error)); + } + } + + this.histogram.record(duration_ms, &attributes); + std::task::Poll::Ready(result) + } +} + +impl Service + for DurationRecorderService +where + S: Service, + OnRequest: MetricsAttributes, + OnResponse: MetricsAttributes + Clone, + OnError: MetricsAttributes + Clone, +{ + type Response = S::Response; + type Error = S::Error; + type Future = DurationRecorderFuture; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, request: R) -> Self::Future { + let start = Instant::now(); + let attributes_from_request = self.on_request.attributes(&request).collect(); + let inner = self.inner.call(request); + + DurationRecorderFuture { + inner, + start, + histogram: self.histogram.clone(), + attributes_from_request, + from_response: self.on_response.clone(), + from_error: self.on_error.clone(), + } + } +} diff --git a/matrix-authentication-service/crates/tower/src/metrics/in_flight.rs b/matrix-authentication-service/crates/tower/src/metrics/in_flight.rs new file mode 100644 index 00000000..02a21540 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/metrics/in_flight.rs @@ -0,0 +1,155 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use opentelemetry::{KeyValue, metrics::UpDownCounter}; +use pin_project_lite::pin_project; +use tower::{Layer, Service}; + +use crate::{METER, MetricsAttributes}; + +/// A [`Layer`] that records the number of in-flight requests. 
+/// +/// # Generic Parameters +/// +/// * `OnRequest`: A type that can extract attributes from a request. +#[derive(Clone, Debug)] +pub struct InFlightCounterLayer { + counter: UpDownCounter, + on_request: OnRequest, +} + +impl InFlightCounterLayer { + /// Create a new [`InFlightCounterLayer`]. + #[must_use] + pub fn new(name: &'static str) -> Self { + let counter = METER + .i64_up_down_counter(name) + .with_unit("{request}") + .with_description("The number of in-flight requests") + .build(); + + Self { + counter, + on_request: (), + } + } +} + +impl InFlightCounterLayer { + /// Set the [`MetricsAttributes`] to use. + #[must_use] + pub fn on_request(self, on_request: OnRequest) -> InFlightCounterLayer { + InFlightCounterLayer { + counter: self.counter, + on_request, + } + } +} + +impl Layer for InFlightCounterLayer +where + OnRequest: Clone, +{ + type Service = InFlightCounterService; + + fn layer(&self, inner: S) -> Self::Service { + InFlightCounterService { + inner, + counter: self.counter.clone(), + on_request: self.on_request.clone(), + } + } +} + +/// A middleware that records the number of in-flight requests. +/// +/// # Generic Parameters +/// +/// * `S`: The type of the inner service. +/// * `OnRequest`: A type that can extract attributes from a request. +#[derive(Clone, Debug)] +pub struct InFlightCounterService { + inner: S, + counter: UpDownCounter, + on_request: OnRequest, +} + +/// A guard that decrements the in-flight request count when dropped. +struct InFlightGuard { + counter: UpDownCounter, + attributes: Vec, +} + +impl InFlightGuard { + fn new(counter: UpDownCounter, attributes: Vec) -> Self { + counter.add(1, &attributes); + + Self { + counter, + attributes, + } + } +} + +impl Drop for InFlightGuard { + fn drop(&mut self) { + self.counter.add(-1, &self.attributes); + } +} + +pin_project! 
{ + /// The future returned by [`InFlightCounterService`] + pub struct InFlightFuture { + guard: InFlightGuard, + + #[pin] + inner: F, + } +} + +impl Future for InFlightFuture +where + F: Future, +{ + type Output = F::Output; + + fn poll( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + self.project().inner.poll(cx) + } +} + +impl Service for InFlightCounterService +where + S: Service, + OnRequest: MetricsAttributes, +{ + type Response = S::Response; + type Error = S::Error; + type Future = InFlightFuture; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, req: R) -> Self::Future { + // Extract attributes from the request. + let attributes = self.on_request.attributes(&req).collect(); + + // Increment the in-flight request count. + let guard = InFlightGuard::new(self.counter.clone(), attributes); + + // Call the inner service, and return a future that decrements the in-flight + // when dropped. + let inner = self.inner.call(req); + InFlightFuture { guard, inner } + } +} diff --git a/matrix-authentication-service/crates/tower/src/metrics/make_attributes.rs b/matrix-authentication-service/crates/tower/src/metrics/make_attributes.rs new file mode 100644 index 00000000..e4bc3ca6 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/metrics/make_attributes.rs @@ -0,0 +1,157 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use opentelemetry::{KeyValue, Value}; + +use crate::{FnWrapper, utils::KV}; + +/// Make metrics attributes from a type. 
+pub trait MetricsAttributes { + type Iter<'a>: Iterator + where + Self: 'a, + T: 'a; + + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a>; +} + +pub fn metrics_attributes_fn(f: F) -> FnWrapper +where + F: Fn(&T) -> Vec + 'static, + T: 'static, +{ + FnWrapper(f) +} + +impl MetricsAttributes for FnWrapper +where + F: Fn(&T) -> Vec + 'static, + T: 'static, +{ + type Iter<'a> = std::vec::IntoIter; + + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a> { + let values: Vec = self.0(t); + values.into_iter() + } +} + +impl MetricsAttributes for () +where + T: 'static, +{ + type Iter<'a> = std::iter::Empty; + + fn attributes(&self, _t: &T) -> Self::Iter<'_> { + std::iter::empty() + } +} + +impl MetricsAttributes for Vec +where + V: MetricsAttributes + 'static, + T: 'static, +{ + type Iter<'a> = Box + 'a>; + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a> { + Box::new(self.iter().flat_map(|v| v.attributes(t))) + } +} + +impl MetricsAttributes for [V; N] +where + V: MetricsAttributes + 'static, + T: 'static, +{ + type Iter<'a> = Box + 'a>; + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a> { + Box::new(self.iter().flat_map(|v| v.attributes(t))) + } +} + +impl MetricsAttributes for KV +where + V: Into + Clone + 'static, + T: 'static, +{ + type Iter<'a> = std::iter::Once; + fn attributes(&self, _t: &T) -> Self::Iter<'_> { + std::iter::once(KeyValue::new(self.0, self.1.clone().into())) + } +} + +impl MetricsAttributes for KeyValue +where + T: 'static, +{ + type Iter<'a> = std::iter::Once; + fn attributes(&self, _t: &T) -> Self::Iter<'_> { + std::iter::once(self.clone()) + } +} + +impl MetricsAttributes for Option +where + V: MetricsAttributes + 'static, + T: 'static, +{ + type Iter<'a> = std::iter::Flatten>>; + + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a> { + self.as_ref().map(|v| v.attributes(t)).into_iter().flatten() + } +} + +macro_rules! chain_for { + // Sub-macro for reversing the list of types. 
+ (@reverse ($( $reversed:ident ,)*)) => { + chain_for!(@build_chain $($reversed),*) + }; + (@reverse ($($reversed:ident,)*) $head:ident $(, $tail:ident)*) => { + chain_for!(@reverse ($head, $($reversed,)*) $($tail),*) + }; + + // Sub-macro for building the chain of iterators. + (@build_chain $last:ident) => { + $last::Iter<'a> + }; + (@build_chain $head:ident, $($tail:ident),*) => { + std::iter::Chain> + }; + + ($($idents:ident),+) => { + chain_for!(@reverse () $($idents),+) + }; +} + +macro_rules! impl_for_tuple { + ($first:ident $(,$rest:ident)*) => { + impl MetricsAttributes for ($first, $($rest,)*) + where + T: 'static, + $first: MetricsAttributes + 'static, + $($rest: MetricsAttributes + 'static,)* + { + type Iter<'a> = chain_for!($first $(, $rest)*); + fn attributes<'a>(&'a self, t: &'a T) -> Self::Iter<'a> { + #[allow(non_snake_case)] + let (head, $($rest,)*) = self; + head.attributes(t) + $(.chain($rest.attributes(t)))* + } + } + }; +} + +impl_for_tuple!(V1); +impl_for_tuple!(V1, V2); +impl_for_tuple!(V1, V2, V3); +impl_for_tuple!(V1, V2, V3, V4); +impl_for_tuple!(V1, V2, V3, V4, V5); +impl_for_tuple!(V1, V2, V3, V4, V5, V6); +impl_for_tuple!(V1, V2, V3, V4, V5, V6, V7); +impl_for_tuple!(V1, V2, V3, V4, V5, V6, V7, V8); +impl_for_tuple!(V1, V2, V3, V4, V5, V6, V7, V8, V9); diff --git a/matrix-authentication-service/crates/tower/src/metrics/mod.rs b/matrix-authentication-service/crates/tower/src/metrics/mod.rs new file mode 100644 index 00000000..9b9094c7 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/metrics/mod.rs @@ -0,0 +1,15 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod duration; +mod in_flight; +mod make_attributes; + +pub use self::{ + duration::{DurationRecorderFuture, DurationRecorderLayer, DurationRecorderService}, + in_flight::{InFlightCounterLayer, InFlightCounterService, InFlightFuture}, + make_attributes::{MetricsAttributes, metrics_attributes_fn}, +}; diff --git a/matrix-authentication-service/crates/tower/src/trace_context.rs b/matrix-authentication-service/crates/tower/src/trace_context.rs new file mode 100644 index 00000000..9180201c --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/trace_context.rs @@ -0,0 +1,101 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use http::Request; +use opentelemetry::propagation::Injector; +use opentelemetry_http::HeaderInjector; +use tower::{Layer, Service}; +use tracing::Span; +use tracing_opentelemetry::OpenTelemetrySpanExt; + +/// A trait to get an [`Injector`] from a request. +trait AsInjector { + type Injector<'a>: Injector + where + Self: 'a; + + fn as_injector(&mut self) -> Self::Injector<'_>; +} + +impl AsInjector for Request { + type Injector<'a> + = HeaderInjector<'a> + where + Self: 'a; + + fn as_injector(&mut self) -> Self::Injector<'_> { + HeaderInjector(self.headers_mut()) + } +} + +/// A [`Layer`] that adds a trace context to the request. +#[derive(Debug, Clone, Copy, Default)] +pub struct TraceContextLayer { + _private: (), +} + +impl TraceContextLayer { + /// Create a new [`TraceContextLayer`]. + #[must_use] + pub fn new() -> Self { + Self::default() + } +} + +impl Layer for TraceContextLayer { + type Service = TraceContextService; + + fn layer(&self, inner: S) -> Self::Service { + TraceContextService::new(inner) + } +} + +/// A [`Service`] that adds a trace context to the request. 
+#[derive(Debug, Clone)] +pub struct TraceContextService { + inner: S, +} + +impl TraceContextService { + /// Create a new [`TraceContextService`]. + pub fn new(inner: S) -> Self { + Self { inner } + } +} + +impl Service for TraceContextService +where + S: Service, + R: AsInjector, +{ + type Response = S::Response; + type Error = S::Error; + type Future = S::Future; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, mut req: R) -> Self::Future { + // Get the `opentelemetry` context out of the `tracing` span. + let context = Span::current().context(); + + // Inject the trace context into the request. The block is there to ensure that + // the injector is dropped before calling the inner service, to avoid borrowing + // issues. + { + let mut injector = req.as_injector(); + opentelemetry::global::get_text_map_propagator(|propagator| { + propagator.inject_context(&context, &mut injector); + }); + } + + self.inner.call(req) + } +} diff --git a/matrix-authentication-service/crates/tower/src/tracing/enrich_span.rs b/matrix-authentication-service/crates/tower/src/tracing/enrich_span.rs new file mode 100644 index 00000000..1c726ba3 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/enrich_span.rs @@ -0,0 +1,106 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use tracing::{Span, Value}; + +use crate::utils::{FnWrapper, KV}; + +/// A trait for enriching a span with information a structure. +pub trait EnrichSpan { + fn enrich_span(&self, span: &Span, t: &T); +} + +impl EnrichSpan for FnWrapper +where + F: Fn(&Span, &T), +{ + fn enrich_span(&self, span: &Span, t: &T) { + (self.0)(span, t); + } +} + +/// Enrich span from a function. 
+#[must_use] +pub fn enrich_span_fn(f: F) -> FnWrapper +where + F: Fn(&Span, &T), +{ + FnWrapper(f) +} + +impl EnrichSpan for () { + fn enrich_span(&self, _span: &Span, _t: &T) {} +} + +impl EnrichSpan for KV +where + V: Value, +{ + fn enrich_span(&self, span: &Span, _t: &T) { + span.record(self.0, &self.1); + } +} + +/// A macro to implement [`EnrichSpan`] for a tuple of types that implement +/// [`EnrichSpan`]. +macro_rules! impl_for_tuple { + ($($T:ident),+) => { + impl EnrichSpan for ($($T,)+) + where + $($T: EnrichSpan),+ + { + fn enrich_span(&self, span: &Span, t: &T) { + #[allow(non_snake_case)] + let ($(ref $T,)+) = *self; + $( + $T.enrich_span(span, t); + )+ + } + } + }; +} + +impl_for_tuple!(T1); +impl_for_tuple!(T1, T2); +impl_for_tuple!(T1, T2, T3); +impl_for_tuple!(T1, T2, T3, T4); +impl_for_tuple!(T1, T2, T3, T4, T5); +impl_for_tuple!(T1, T2, T3, T4, T5, T6); +impl_for_tuple!(T1, T2, T3, T4, T5, T6, T7); +impl_for_tuple!(T1, T2, T3, T4, T5, T6, T7, T8); + +impl EnrichSpan for Option +where + T: EnrichSpan, +{ + fn enrich_span(&self, span: &Span, request: &R) { + if let Some(ref t) = *self { + t.enrich_span(span, request); + } + } +} + +impl EnrichSpan for [T; N] +where + T: EnrichSpan, +{ + fn enrich_span(&self, span: &Span, request: &R) { + for t in self { + t.enrich_span(span, request); + } + } +} + +impl EnrichSpan for Vec +where + T: EnrichSpan, +{ + fn enrich_span(&self, span: &Span, request: &R) { + for t in self { + t.enrich_span(span, request); + } + } +} diff --git a/matrix-authentication-service/crates/tower/src/tracing/future.rs b/matrix-authentication-service/crates/tower/src/tracing/future.rs new file mode 100644 index 00000000..bac62428 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/future.rs @@ -0,0 +1,63 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::task::ready; + +use pin_project_lite::pin_project; +use tracing::Span; + +pin_project! { + pub struct TraceFuture { + #[pin] + inner: F, + span: Span, + on_response: OnResponse, + on_error: OnError, + } +} + +impl TraceFuture { + pub fn new(inner: F, span: Span, on_response: OnResponse, on_error: OnError) -> Self { + Self { + inner, + span, + on_response, + on_error, + } + } +} + +impl Future for TraceFuture +where + F: Future>, + OnResponse: super::enrich_span::EnrichSpan, + OnError: super::enrich_span::EnrichSpan, +{ + type Output = Result; + + fn poll( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let this = self.project(); + + // Poll the inner future, with the span entered. This is effectively what + // [`tracing::Instrumented`] does. + let _guard = this.span.enter(); + let result = ready!(this.inner.poll(cx)); + + match &result { + Ok(response) => { + this.on_response.enrich_span(this.span, response); + } + Err(error) => { + this.on_error.enrich_span(this.span, error); + } + } + + std::task::Poll::Ready(result) + } +} diff --git a/matrix-authentication-service/crates/tower/src/tracing/layer.rs b/matrix-authentication-service/crates/tower/src/tracing/layer.rs new file mode 100644 index 00000000..1da80e90 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/layer.rs @@ -0,0 +1,97 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +use tower::Layer; +use tracing::Span; + +use crate::{enrich_span_fn, make_span_fn, utils::FnWrapper}; + +#[derive(Clone, Debug)] +pub struct TraceLayer { + make_span: MakeSpan, + on_response: OnResponse, + on_error: OnError, +} + +impl TraceLayer> { + #[must_use] + pub fn from_fn(f: F) -> TraceLayer> + where + F: Fn(&T) -> Span, + { + TraceLayer::new(make_span_fn(f)) + } +} + +impl TraceLayer { + #[must_use] + pub fn new(make_span: MakeSpan) -> Self { + Self { + make_span, + on_response: (), + on_error: (), + } + } +} + +impl TraceLayer { + #[must_use] + pub fn on_response( + self, + on_response: NewOnResponse, + ) -> TraceLayer { + TraceLayer { + make_span: self.make_span, + on_response, + on_error: self.on_error, + } + } + + #[must_use] + pub fn on_response_fn(self, f: F) -> TraceLayer, OnError> + where + F: Fn(&Span, &T), + { + self.on_response(enrich_span_fn(f)) + } + + #[must_use] + pub fn on_error( + self, + on_error: NewOnError, + ) -> TraceLayer { + TraceLayer { + make_span: self.make_span, + on_response: self.on_response, + on_error, + } + } + + pub fn on_error_fn(self, f: F) -> TraceLayer> + where + F: Fn(&Span, &E), + { + self.on_error(enrich_span_fn(f)) + } +} + +impl Layer for TraceLayer +where + MakeSpan: Clone, + OnResponse: Clone, + OnError: Clone, +{ + type Service = super::service::TraceService; + + fn layer(&self, inner: S) -> Self::Service { + super::service::TraceService::new( + inner, + self.make_span.clone(), + self.on_response.clone(), + self.on_error.clone(), + ) + } +} diff --git a/matrix-authentication-service/crates/tower/src/tracing/make_span.rs b/matrix-authentication-service/crates/tower/src/tracing/make_span.rs new file mode 100644 index 00000000..ba03f47e --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/make_span.rs @@ -0,0 +1,64 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use tracing::Span; + +use super::enrich_span::EnrichSpan; +use crate::utils::FnWrapper; + +/// A trait for creating a span for a request. +pub trait MakeSpan { + fn make_span(&self, request: &R) -> Span; +} + +impl MakeSpan for FnWrapper +where + F: Fn(&R) -> Span, +{ + fn make_span(&self, request: &R) -> Span { + (self.0)(request) + } +} + +/// Make span from a function. +pub fn make_span_fn(f: F) -> FnWrapper +where + F: Fn(&R) -> Span, +{ + FnWrapper(f) +} + +/// A macro to implement [`MakeSpan`] for a tuple of types, where the first type +/// implements [`MakeSpan`] and the rest implement [`EnrichSpan`]. +macro_rules! impl_for_tuple { + (M, $($T:ident),+) => { + impl MakeSpan for (M, $($T),+) + where + M: MakeSpan, + $($T: EnrichSpan),+ + { + fn make_span(&self, request: &R) -> Span { + #[allow(non_snake_case)] + let (ref m, $(ref $T),+) = *self; + + let span = m.make_span(request); + $( + $T.enrich_span(&span, request); + )+ + span + } + } + }; +} + +impl_for_tuple!(M, T1); +impl_for_tuple!(M, T1, T2); +impl_for_tuple!(M, T1, T2, T3); +impl_for_tuple!(M, T1, T2, T3, T4); +impl_for_tuple!(M, T1, T2, T3, T4, T5); +impl_for_tuple!(M, T1, T2, T3, T4, T5, T6); +impl_for_tuple!(M, T1, T2, T3, T4, T5, T6, T7); +impl_for_tuple!(M, T1, T2, T3, T4, T5, T6, T7, T8); diff --git a/matrix-authentication-service/crates/tower/src/tracing/mod.rs b/matrix-authentication-service/crates/tower/src/tracing/mod.rs new file mode 100644 index 00000000..0c355e65 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/mod.rs @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +mod enrich_span; +mod future; +mod layer; +mod make_span; +mod service; + +pub use self::{ + enrich_span::{EnrichSpan, enrich_span_fn}, + future::TraceFuture, + layer::TraceLayer, + make_span::{MakeSpan, make_span_fn}, + service::TraceService, +}; diff --git a/matrix-authentication-service/crates/tower/src/tracing/service.rs b/matrix-authentication-service/crates/tower/src/tracing/service.rs new file mode 100644 index 00000000..9c348451 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/tracing/service.rs @@ -0,0 +1,59 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use tower::Service; + +use super::future::TraceFuture; + +#[derive(Clone, Debug)] +pub struct TraceService { + inner: S, + make_span: MakeSpan, + on_response: OnResponse, + on_error: OnError, +} + +impl TraceService { + /// Create a new [`TraceService`]. 
+ #[must_use] + pub fn new(inner: S, make_span: MakeSpan, on_response: OnResponse, on_error: OnError) -> Self { + Self { + inner, + make_span, + on_response, + on_error, + } + } +} + +impl Service + for TraceService +where + S: Service, + MakeSpan: super::make_span::MakeSpan, + OnResponse: super::enrich_span::EnrichSpan + Clone, + OnError: super::enrich_span::EnrichSpan + Clone, +{ + type Response = S::Response; + type Error = S::Error; + type Future = TraceFuture; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, request: R) -> Self::Future { + let span = self.make_span.make_span(&request); + let guard = span.enter(); + let inner = self.inner.call(request); + drop(guard); + + TraceFuture::new(inner, span, self.on_response.clone(), self.on_error.clone()) + } +} diff --git a/matrix-authentication-service/crates/tower/src/utils.rs b/matrix-authentication-service/crates/tower/src/utils.rs new file mode 100644 index 00000000..c9a6e9e4 --- /dev/null +++ b/matrix-authentication-service/crates/tower/src/utils.rs @@ -0,0 +1,90 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use opentelemetry::{KeyValue, Value}; +use tower::{Layer, Service}; + +/// A simple static key-value pair. +#[derive(Clone, Debug)] +pub struct KV(pub &'static str, pub V); + +impl From> for KeyValue +where + V: Into, +{ + fn from(value: KV) -> Self { + Self::new(value.0, value.1.into()) + } +} + +/// A wrapper around a function that can be used to generate a key-value pair, +/// make or enrich spans. +#[derive(Clone, Debug)] +pub struct FnWrapper(pub F); + +/// A no-op layer that has the request type bound. 
+#[derive(Clone, Copy, Debug)] +pub struct IdentityLayer { + _request: std::marker::PhantomData, +} + +impl Default for IdentityLayer { + fn default() -> Self { + Self { + _request: std::marker::PhantomData, + } + } +} + +/// A no-op service that has the request type bound. +#[derive(Clone, Copy, Debug)] +pub struct IdentityService { + _request: std::marker::PhantomData, + inner: S, +} + +impl Default for IdentityService +where + S: Default, +{ + fn default() -> Self { + Self { + _request: std::marker::PhantomData, + inner: S::default(), + } + } +} + +impl Layer for IdentityLayer { + type Service = IdentityService; + + fn layer(&self, inner: S) -> Self::Service { + IdentityService { + _request: std::marker::PhantomData, + inner, + } + } +} + +impl Service for IdentityService +where + S: Service, +{ + type Response = S::Response; + type Error = S::Error; + type Future = S::Future; + + fn poll_ready( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, req: R) -> Self::Future { + self.inner.call(req) + } +} diff --git a/matrix-authentication-service/deny.toml b/matrix-authentication-service/deny.toml new file mode 100644 index 00000000..1671119c --- /dev/null +++ b/matrix-authentication-service/deny.toml @@ -0,0 +1,92 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. + +[graph] +targets = [ + { triple = "x86_64-unknown-linux-gnu" }, + { triple = "aarch64-unknown-linux-gnu" }, + { triple = "x86_64-apple-darwin" }, + { triple = "aarch64-apple-darwin" }, +] + +[advisories] +version = 2 +db-path = "~/.cargo/advisory-db" +db-urls = ["https://github.com/rustsec/advisory-db"] +ignore = [ + # RSA key extraction "Marvin Attack". 
This is only relevant when using + # PKCS#1 v1.5 encryption, which we don't + "RUSTSEC-2023-0071", +] + +[licenses] +version = 2 +allow = [ + "LicenseRef-Element-Commercial", + "0BSD", + "Apache-2.0 WITH LLVM-exception", + "Apache-2.0", + "BSD-2-Clause", + "BSD-3-Clause", + "ISC", + "MIT", + "MPL-2.0", + "OpenSSL", + "Unicode-3.0", + "Zlib", + "CDLA-Permissive-2.0", +] + +# Ring's license is a bit complicated, so we need to specify it manually +[[licenses.clarify]] +name = "ring" +version = "*" +expression = "MIT AND ISC AND OpenSSL" +license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] + +[bans] +# List of crates to deny +deny = [ + # We should never depend on openssl + { name = "openssl" }, + { name = "openssl-sys" }, + { name = "native-tls" }, +] + +# We try to avoid duplicating crates and track exceptions here +multiple-versions = "deny" + +skip = [ + { name = "itertools", version = "0.13.0" }, # zxcvbn depends on this old version + { name = "hashbrown", version = "0.14.5" }, # a few crates depend on this old version + # a few dependencies depend on the 1.x version of thiserror + { name = "thiserror", version = "1.0.69" }, + { name = "thiserror-impl", version = "1.0.69" }, + # axum-macros, sqlx-macros and sea-query-attr use an old version + { name = "heck", version = "0.4.1" }, + # pad depends on an old version + { name = "unicode-width", version = "0.1.14" }, + # cron depends on this old version + # https://github.com/zslayton/cron/pull/137 + { name = "winnow", version = "0.6.20" }, + + # We are still mainly using rand 0.8 + { name = "rand", version = "0.8.5" }, + { name = "rand_chacha", version = "0.3.1" }, + { name = "rand_core", version = "0.6.4" }, + { name = "getrandom", version = "0.2.15" }, +] + +skip-tree = [] + +# We should never enable the (default) `oldtime` feature of `chrono` +[[bans.features]] +name = "chrono" +deny = ["oldtime"] + +[sources] +unknown-registry = "warn" +unknown-git = "warn" +allow-registry = 
["https://github.com/rust-lang/crates.io-index"] diff --git a/matrix-authentication-service/docker-bake.hcl b/matrix-authentication-service/docker-bake.hcl new file mode 100644 index 00000000..3c3cac3a --- /dev/null +++ b/matrix-authentication-service/docker-bake.hcl @@ -0,0 +1,43 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. +# +// This is used to set the version reported by the binary through an environment +// variable. This is mainly useful when building out of a git context, like in +// CI, where we don't have the full commit history available +variable "VERGEN_GIT_DESCRIBE" {} + +// This is what is baked by GitHub Actions +group "default" { targets = ["regular", "debug"] } + +// Targets filled by GitHub Actions: one for the regular tag, one for the debug tag +target "docker-metadata-action" {} +target "docker-metadata-action-debug" {} + +// This sets the platforms and is further extended by GitHub Actions to set the +// output and the cache locations +target "base" { + args = { + // This is set so that when we use a git context, the .git directory is + // present, as we may be infering the version at build time out of it + BUILDKIT_CONTEXT_KEEP_GIT_DIR = 1 + + // Pass down the version from an external git describe source + VERGEN_GIT_DESCRIBE = "${VERGEN_GIT_DESCRIBE}" + } + + platforms = [ + "linux/amd64", + "linux/arm64", + ] +} + +target "regular" { + inherits = ["base", "docker-metadata-action"] +} + +target "debug" { + inherits = ["base", "docker-metadata-action-debug"] + target = "debug" +} diff --git a/matrix-authentication-service/docs/README.md b/matrix-authentication-service/docs/README.md new file mode 100644 index 00000000..914a1817 --- /dev/null +++ b/matrix-authentication-service/docs/README.md @@ -0,0 +1,20 @@ +# About this documentation + +This documentation is intended to give an overview of how the 
`matrix-authentication-service` (MAS) works, both from an admin perspective and from a developer perspective. + +MAS is an OAuth 2.0 and OpenID Provider server for Matrix. +It has been created to support the migration of Matrix to an OpenID Connect (OIDC) based authentication layer as per [MSC3861](https://github.com/matrix-org/matrix-doc/pull/3861). + +The documentation itself is built using [mdBook](https://rust-lang.github.io/mdBook/). +A hosted version is available at . + +## How the documentation is organized + +This documentation has four main sections: + +- The [installation guide](./setup/) will guide you through the process of setting up the `matrix-authentication-service` on your own infrastructure. +- The topics sections goes into more details about how the service works, like the [policy engine](./topics/policy.md) and how [authorization sessions](./topics/authorization.md) are managed. +- The reference documentation covers [configuration options](./reference/configuration.md), the [Admin API](./api/index.html), the [scopes](./reference/scopes.md) supported by the service, and the [command line interface](./reference/cli/). +- The developer documentation is intended for people who want to [contribute to the project](./development/contributing.md). 
Developers may also be interested in: + - Technical documentation for individual crates: [`rustdoc`](./rustdoc/mas_handlers/) + - UI components: [`storybook`](./storybook/) diff --git a/matrix-authentication-service/docs/SUMMARY.md b/matrix-authentication-service/docs/SUMMARY.md new file mode 100644 index 00000000..27182d5f --- /dev/null +++ b/matrix-authentication-service/docs/SUMMARY.md @@ -0,0 +1,52 @@ +# Summary + +# Introduction + +- [About this documentation](./README.md) + +# Setup + +- [Introduction](./setup/README.md) +- [Installation](./setup/installation.md) +- [General configuration](./setup/general.md) +- [Database setup](./setup/database.md) +- [Homeserver configuration](./setup/homeserver.md) +- [Configuring a reverse proxy](./setup/reverse-proxy.md) +- [Configure an upstream SSO provider](./setup/sso.md) +- [Running the service](./setup/running.md) +- [Migrating an existing homeserver](./setup/migration.md) + +# Topics + +- [Policy engine](./topics/policy.md) +- [Authorization and sessions](./topics/authorization.md) +- [Use the Admin API](./topics/admin-api.md) +- [Get an access token](./topics/access-token.md) + +# Reference + +- [Configuration file reference](./reference/configuration.md) +- [Admin API](./api/index.html) +- [OAuth 2.0 scopes](./reference/scopes.md) +- [Command line tool](./reference/cli/README.md) + - [`config`](./reference/cli/config.md) + - [`database`](./reference/cli/database.md) + - [`manage`](./reference/cli/manage.md) + - [`server`](./reference/cli/server.md) + - [`syn2mas`](./reference/cli/syn2mas.md) + - [`worker`](./reference/cli/worker.md) + - [`templates`](./reference/cli/templates.md) + - [`doctor`](./reference/cli/doctor.md) + +# Development + +- [Contributing](./development/contributing.md) +- [Releasing](./development/releasing.md) +- [Architecture](./development/architecture.md) +- [Database](./development/database.md) +- [Cleanup jobs](./development/cleanup-jobs.md) +- [Internal GraphQL 
API](./development/graphql.md) + +--- + +[Application Services login](./as-login.md) diff --git a/matrix-authentication-service/docs/api/index.html b/matrix-authentication-service/docs/api/index.html new file mode 100644 index 00000000..4bd8b082 --- /dev/null +++ b/matrix-authentication-service/docs/api/index.html @@ -0,0 +1,23 @@ + + + + + + API documentation + + + +

+ + + + diff --git a/matrix-authentication-service/docs/api/oauth2-redirect.html b/matrix-authentication-service/docs/api/oauth2-redirect.html new file mode 100644 index 00000000..518c01a2 --- /dev/null +++ b/matrix-authentication-service/docs/api/oauth2-redirect.html @@ -0,0 +1,80 @@ + + + + + API documentation: OAuth2 Redirect + + + + + diff --git a/matrix-authentication-service/docs/api/spec.json b/matrix-authentication-service/docs/api/spec.json new file mode 100644 index 00000000..ac56910b --- /dev/null +++ b/matrix-authentication-service/docs/api/spec.json @@ -0,0 +1,7449 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "Matrix Authentication Service admin API", + "version": "" + }, + "servers": [ + { + "url": "{base}", + "variables": { + "base": { + "default": "/", + "description": null + } + } + } + ], + "paths": { + "/api/admin/v1/site-config": { + "get": { + "tags": [ + "server" + ], + "summary": "Get informations about the configuration of this MAS instance", + "operationId": "siteConfig", + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SiteConfig" + }, + "example": { + "server_name": "example.com", + "password_login_enabled": true, + "password_registration_enabled": true, + "password_registration_email_required": true, + "registration_token_required": true, + "email_change_allowed": true, + "displayname_change_allowed": true, + "password_change_allowed": true, + "account_recovery_allowed": true, + "account_deactivation_allowed": true, + "captcha_enabled": true, + "minimum_password_complexity": 3 + } + } + } + } + } + } + }, + "/api/admin/v1/version": { + "get": { + "tags": [ + "server" + ], + "summary": "Get the version currently running", + "operationId": "version", + "responses": { + "200": { + "description": "", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Version" + }, + "example": { + "version": "v1.0.0" + } + } + } + } + 
} + } + }, + "/api/admin/v1/compat-sessions": { + "get": { + "tags": [ + "compat-session" + ], + "summary": "List compatibility sessions", + "description": "Retrieve a list of compatibility sessions.\nNote that by default, all sessions, including finished ones are returned, with the oldest first.\nUse the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.", + "operationId": "listCompatSessions", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user]", + "description": "Retrieve the items for the given user", + "schema": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user-session]", + "description": "Retrieve the items started from the given browser session", + "schema": { + "description": "Retrieve the items started from the given browser session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[status]", + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "schema": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/CompatSessionStatus" + }, + { + "type": "null" + } + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of compatibility sessions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_CompatSession" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "compat-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + "user_id": "01040G2081040G2081040G2081", + "device_id": "AABBCCDDEE", + "user_session_id": "0H248H248H248H248H248H248H", + "redirect_uri": "https://example.com/redirect", + "created_at": 
"1970-01-01T00:00:00Z", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "1.2.3.4", + "finished_at": null, + "human_name": "Laptop" + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "compat-session", + "id": "02081040G2081040G2081040G2", + "attributes": { + "user_id": "01040G2081040G2081040G2081", + "device_id": "FFGGHHIIJJ", + "user_session_id": "0J289144GJ289144GJ289144GJ", + "redirect_uri": null, + "created_at": "1970-01-01T00:00:00Z", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "1.2.3.4", + "finished_at": "1970-01-01T00:00:00Z", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "compat-session", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "user_id": "01040G2081040G2081040G2081", + "device_id": null, + "user_session_id": null, + "redirect_uri": null, + "created_at": "1970-01-01T00:00:00Z", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "finished_at": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" + } + } + } + ], + "links": { + "self": "/api/admin/v1/compat-sessions?page[first]=3", + "first": "/api/admin/v1/compat-sessions?page[first]=3", + "last": "/api/admin/v1/compat-sessions?page[last]=3", + "next": "/api/admin/v1/compat-sessions?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": 
"User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/compat-sessions/{id}": { + "get": { + "tags": [ + "compat-session" + ], + "summary": "Get a compatibility session", + "operationId": "getCompatSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Compatibility session was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_CompatSession" + }, + "example": { + "data": { + "type": "compat-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + "user_id": "01040G2081040G2081040G2081", + "device_id": "AABBCCDDEE", + "user_session_id": "0H248H248H248H248H248H248H", + "redirect_uri": "https://example.com/redirect", + "created_at": "1970-01-01T00:00:00Z", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "1.2.3.4", + "finished_at": null, + "human_name": "Laptop" + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/compat-sessions/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Compatibility session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Compatibility session ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/compat-sessions/{id}/finish": { + "post": { + "tags": [ + "compat-session" + ], + "summary": "Finish a compatibility session", + "description": "Calling this endpoint will finish the compatibility session, preventing any further use. 
A job will be scheduled to sync the user's devices with the homeserver.", + "operationId": "finishCompatSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Compatibility session was finished", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_CompatSession" + }, + "example": { + "data": { + "type": "compat-session", + "id": "02081040G2081040G2081040G2", + "attributes": { + "user_id": "01040G2081040G2081040G2081", + "device_id": "FFGGHHIIJJ", + "user_session_id": "0J289144GJ289144GJ289144GJ", + "redirect_uri": null, + "created_at": "1970-01-01T00:00:00Z", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "1.2.3.4", + "finished_at": "1970-01-01T00:00:00Z", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/compat-sessions/02081040G2081040G2081040G2" + } + }, + "links": { + "self": "/api/admin/v1/compat-sessions/02081040G2081040G2081040G2/finish" + } + } + } + } + }, + "400": { + "description": "Session is already finished", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Compatibility session with ID 00000000000000000000000000 is already finished" + } + ] + } + } + } + }, + "404": { + "description": "Compatibility session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Compatibility session with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/oauth2-sessions": { + "get": { + "tags": [ + "oauth2-session" + ], + "summary": "List OAuth 2.0 sessions", + "description": "Retrieve a list of OAuth 2.0 
sessions.\nNote that by default, all sessions, including finished ones are returned, with the oldest first.\nUse the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.", + "operationId": "listOAuth2Sessions", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user]", + "description": "Retrieve the items for the given user", + "schema": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[client]", + "description": "Retrieve the items for the given client", + "schema": { + "description": "Retrieve the items for the given client", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[client-kind]", + "description": "Retrieve the items only for a specific client kind", + "schema": { + "description": "Retrieve the items only for a specific client kind", + "anyOf": [ + { + "$ref": "#/components/schemas/OAuth2ClientKind" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user-session]", + "description": "Retrieve the items started from the given browser session", + "schema": { + "description": "Retrieve the items started from the given browser session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[scope]", + "description": "Retrieve the items with the given scope", + "schema": { + "description": "Retrieve the items with the given scope", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[status]", + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "schema": { + 
"description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/OAuth2SessionStatus" + }, + { + "type": "null" + } + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of OAuth 2.0 sessions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_OAuth2Session" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "oauth2-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": "02081040G2081040G2081040G2", + "user_session_id": "030C1G60R30C1G60R30C1G60R3", + "client_id": "040G2081040G2081040G208104", + "scope": "openid", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1", + "human_name": "Laptop" + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "oauth2-session", + "id": "02081040G2081040G2081040G2", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": null, + "user_session_id": null, + "client_id": "050M2GA1850M2GA1850M2GA185", + "scope": "urn:mas:admin", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null, + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "oauth2-session", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": "1970-01-01T00:00:00Z", + "user_id": "040G2081040G2081040G208104", + 
"user_session_id": "050M2GA1850M2GA1850M2GA185", + "client_id": "060R30C1G60R30C1G60R30C1G6", + "scope": "urn:matrix:client:api:*", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" + } + } + } + ], + "links": { + "self": "/api/admin/v1/oauth2-sessions?page[first]=3", + "first": "/api/admin/v1/oauth2-sessions?page[first]=3", + "last": "/api/admin/v1/oauth2-sessions?page[last]=3", + "next": "/api/admin/v1/oauth2-sessions?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + }, + "400": { + "description": "Invalid scope", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Invalid scope \"not a valid scope\" in filter parameters" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/oauth2-sessions/{id}": { + "get": { + "tags": [ + "oauth2-session" + ], + "summary": "Get an OAuth 2.0 session", + "operationId": "getOAuth2Session", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "OAuth 2.0 session was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_OAuth2Session" + }, + "example": { + "data": { + "type": "oauth2-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": 
"1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": "02081040G2081040G2081040G2", + "user_session_id": "030C1G60R30C1G60R30C1G60R3", + "client_id": "040G2081040G2081040G208104", + "scope": "openid", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1", + "human_name": "Laptop" + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "OAuth 2.0 session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "OAuth 2.0 session ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/oauth2-sessions/{id}/finish": { + "post": { + "tags": [ + "oauth2-session" + ], + "summary": "Finish an OAuth 2.0 session", + "description": "Calling this endpoint will finish the OAuth 2.0 session, preventing any further use. 
If the session has a user associated with it, a job will be scheduled to sync the user's devices with the homeserver.", + "operationId": "finishOAuth2Session", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "OAuth 2.0 session was finished", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_OAuth2Session" + }, + "example": { + "data": { + "type": "oauth2-session", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": "1970-01-01T00:00:00Z", + "user_id": "040G2081040G2081040G208104", + "user_session_id": "050M2GA1850M2GA1850M2GA185", + "client_id": "060R30C1G60R30C1G60R30C1G6", + "scope": "urn:matrix:client:api:*", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1", + "human_name": null + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/030C1G60R30C1G60R30C1G60R3" + } + }, + "links": { + "self": "/api/admin/v1/oauth2-sessions/030C1G60R30C1G60R30C1G60R3/finish" + } + } + } + } + }, + "400": { + "description": "Session is already finished", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "OAuth 2.0 session with ID 00000000000000000000000000 is already finished" + } + ] + } + } + } + }, + "404": { + "description": "OAuth 2.0 session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "OAuth 2.0 session with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/personal-sessions": { + "get": { + "tags": [ + "personal-session" + ], + "summary": "List 
personal sessions", + "description": "Retrieve a list of personal sessions.\nNote that by default, all sessions, including revoked ones are returned, with the oldest first.\nUse the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.", + "operationId": "listPersonalSessions", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[owner_user]", + "description": "Filter by owner user ID", + "schema": { + "description": "Filter by owner user ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[owner_client]", + "description": "Filter by owner `OAuth2` client ID", + "schema": { + "description": "Filter by owner `OAuth2` client ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[actor_user]", + "description": "Filter by actor user ID", + "schema": { + "description": "Filter by actor user ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[scope]", + "description": "Retrieve the items with the given scope", + "schema": { + "description": "Retrieve the items with the given scope", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[status]", + "description": "Filter by session status", + "schema": { + "description": "Filter by session status", + "anyOf": [ + { + "$ref": "#/components/schemas/PersonalSessionStatus" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[expires_before]", + "description": "Filter by access token expiry date", + "schema": { + "description": "Filter by access token expiry date", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[expires_after]", + "description": "Filter by access token expiry date", + "schema": { + "description": "Filter by access token expiry 
date", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[expires]", + "description": "Filter by whether the access token has an expiry time", + "schema": { + "description": "Filter by whether the access token has an expiry time", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of personal sessions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_PersonalSession" + }, + "example": { + "meta": { + "count": 3 + }, + "data": [ + { + "type": "personal-session", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T13:00:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "Alice's Development Token", + "scope": "openid urn:matrix:org.matrix.msc2967.client:api:*", + "last_active_at": "2022-01-16T15:30:00Z", + "last_active_ip": "192.168.1.100", + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + }, + { + "type": "personal-session", + "id": "01FSHN9AG0BJ6AC5HQ9X6H4RP5", + "attributes": { + "created_at": "2022-01-16T13:01:00Z", + "revoked_at": "2022-01-16T16:20:00Z", + "owner_user_id": "01FSHN9AG0NZAA6S4AF7CTV32F", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG0NZAA6S4AF7CTV32F", + "human_name": "Bob's Mobile App", + "scope": "openid", + "last_active_at": "2022-01-16T16:03:20Z", + "last_active_ip": "10.0.0.50", + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0BJ6AC5HQ9X6H4RP5" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0BJ6AC5HQ9X6H4RP5" + } + } + }, + { + "type": "personal-session", + "id": 
"01FSHN9AG0CJ6AC5HQ9X6H4RP6", + "attributes": { + "created_at": "2022-01-16T13:02:00Z", + "revoked_at": null, + "owner_user_id": null, + "owner_client_id": "01FSHN9AG0DJ6AC5HQ9X6H4RP7", + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "CI/CD Pipeline Token", + "scope": "openid urn:mas:admin", + "last_active_at": "2022-01-16T15:46:40Z", + "last_active_ip": "203.0.113.10", + "expires_at": "2022-01-24T04:36:40Z" + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0CJ6AC5HQ9X6H4RP6" + }, + "meta": { + "page": { + "cursor": "01FSHN9AG0CJ6AC5HQ9X6H4RP6" + } + } + } + ], + "links": { + "self": "/api/admin/v1/personal-sessions?page[first]=3", + "first": "/api/admin/v1/personal-sessions?page[first]=3", + "last": "/api/admin/v1/personal-sessions?page[last]=3", + "next": "/api/admin/v1/personal-sessions?page[after]=01FSHN9AG0CJ6AC5HQ9X6H4RP6&page[first]=3" + } + } + } + } + }, + "404": { + "description": "Client was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Client ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "post": { + "tags": [ + "personal-session" + ], + "summary": "Create a new personal session with personal access token", + "operationId": "createPersonalSession", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreatePersonalSessionRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Personal session and personal access token were created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PersonalSession" + } + } + } + }, + "400": { + "description": "Invalid scope provided", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Invalid scope" + } + 
] + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/personal-sessions/{id}": { + "get": { + "tags": [ + "personal-session" + ], + "summary": "Get a personal session", + "operationId": "getPersonalSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Personal session details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PersonalSession" + }, + "example": { + "data": { + "type": "personal-session", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T13:00:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "Alice's Development Token", + "scope": "openid urn:matrix:org.matrix.msc2967.client:api:*", + "last_active_at": "2022-01-16T15:30:00Z", + "last_active_ip": "192.168.1.100", + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + } + } + }, + "404": { + "description": "Personal session not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Personal session not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/personal-sessions/{id}/revoke": { + "post": { + "tags": [ + "personal-session" + ], + "summary": "Revoke a personal session", + "operationId": 
"revokePersonalSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Personal session was revoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PersonalSession" + }, + "example": { + "data": { + "type": "personal-session", + "id": "01FSHN9AG0AJ6AC5HQ9X6H4RP4", + "attributes": { + "created_at": "2022-01-16T13:00:00Z", + "revoked_at": null, + "owner_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "owner_client_id": null, + "actor_user_id": "01FSHN9AG0MZAA6S4AF7CTV32E", + "human_name": "Alice's Development Token", + "scope": "openid urn:matrix:org.matrix.msc2967.client:api:*", + "last_active_at": "2022-01-16T15:30:00Z", + "last_active_ip": "192.168.1.100", + "expires_at": null + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + }, + "links": { + "self": "/api/admin/v1/personal-sessions/01FSHN9AG0AJ6AC5HQ9X6H4RP4" + } + } + } + } + }, + "404": { + "description": "Personal session not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Personal session with ID 00000000000000000000000000 not found" + } + ] + } + } + } + }, + "409": { + "description": "Personal session already revoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Personal session with ID 00000000000000000000000000 is already revoked" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/personal-sessions/{id}/regenerate": { + "post": { + "tags": [ + "personal-session" + ], + "summary": "Regenerate a personal session by replacing its personal access token", + "operationId": "regeneratePersonalSession", + 
"parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RegeneratePersonalSessionRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Personal session was regenerated and a personal access token was created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PersonalSession" + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/policy-data": { + "post": { + "tags": [ + "policy-data" + ], + "summary": "Set the current policy data", + "operationId": "setPolicyData", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetPolicyDataRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Policy data was successfully set", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PolicyData" + }, + "example": { + "data": { + "type": "policy-data", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "data": { + "hello": "world", + "foo": 42, + "bar": true + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + } + } + } + }, + "400": { + "description": "Invalid policy data", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + 
"title": "Failed to instanciate policy with the provided data" + }, + { + "title": "invalid policy data" + }, + { + "title": "Failed to merge policy data objects" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/policy-data/latest": { + "get": { + "tags": [ + "policy-data" + ], + "summary": "Get the latest policy data", + "operationId": "getLatestPolicyData", + "responses": { + "200": { + "description": "Latest policy data was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PolicyData" + }, + "example": { + "data": { + "type": "policy-data", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "data": { + "hello": "world", + "foo": 42, + "bar": true + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "No policy data was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "No policy data found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/policy-data/{id}": { + "get": { + "tags": [ + "policy-data" + ], + "summary": "Get policy data by ID", + "operationId": "getPolicyData", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Policy data was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_PolicyData" + }, + "example": { + "data": { + "type": "policy-data", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "data": { + "hello": "world", + "foo": 42, + "bar": true + } + }, + 
"links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/policy-data/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Policy data was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Policy data with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users": { + "get": { + "tags": [ + "user" + ], + "summary": "List users", + "operationId": "listUsers", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[admin]", + "description": "Retrieve users with (or without) the `admin` flag set", + "schema": { + "description": "Retrieve users with (or without) the `admin` flag set", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[legacy-guest]", + "description": "Retrieve users with (or without) the `legacy_guest` flag set", + "schema": { + "description": "Retrieve users with (or without) the `legacy_guest` flag set", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[search]", + "description": "Retrieve users where the username matches contains the given string\n\n Note that this doesn't change the ordering of the result, which are\n still ordered by ID.", + "schema": { + "description": "Retrieve users where the username matches contains the given string\n\n Note that this doesn't change the ordering of the result, which are\n still ordered by ID.", + "type": [ + "string", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[status]", + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all users, including locked ones.\n\n * `active`: Only retrieve active users\n\n * `locked`: Only retrieve locked users (includes deactivated users)\n\n * `deactivated`: Only retrieve deactivated users", + "schema": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all users, including locked ones.\n\n * `active`: Only retrieve active users\n\n * `locked`: Only retrieve locked users (includes deactivated users)\n\n * `deactivated`: Only retrieve deactivated users", + "anyOf": [ + { + "$ref": "#/components/schemas/UserStatus" + }, + { + "type": "null" + } + ] + }, + "style": "form" + } + ], + "responses": { + 
"200": { + "description": "Paginated response of users", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_User" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "user", + "id": "02081040G2081040G2081040G2", + "attributes": { + "username": "bob", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": true, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "user", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "username": "charlie", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": "1970-01-01T00:00:00Z", + "deactivated_at": null, + "admin": false, + "legacy_guest": true + }, + "links": { + "self": "/api/admin/v1/users/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" + } + } + } + ], + "links": { + "self": "/api/admin/v1/users?page[first]=3", + "first": "/api/admin/v1/users?page[first]=3", + "last": "/api/admin/v1/users?page[last]=3", + "next": "/api/admin/v1/users?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + } + } + }, + "post": { + "tags": [ + "user" + ], + "summary": "Create a new user", + "operationId": "createUser", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": 
"User was created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + } + } + } + }, + "400": { + "description": "Username is not valid", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Username is not valid" + } + ] + } + } + } + }, + "409": { + "description": "Username is reserved by the homeserver", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Username is reserved by the homeserver" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get a user", + "operationId": "getUser", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + }, + "links": { + "self": 
"/api/admin/v1/users/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/set-password": { + "post": { + "tags": [ + "user" + ], + "summary": "Set the password for a user", + "operationId": "setUserPassword", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SetUserPasswordRequest" + } + } + }, + "required": true + }, + "responses": { + "204": { + "description": "Password was set" + }, + "400": { + "description": "Password is too weak", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Password is too weak" + } + ] + } + } + } + }, + "403": { + "description": "Password auth is disabled in the server configuration", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Password auth is disabled" + } + ] + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/by-username/{username}": { + "get": { + "tags": [ + "user" + ], + "summary": "Get a user by its username (localpart)", + "operationId": "getUserByUsername", + "parameters": [ + { + "in": "path", + "name": 
"username", + "description": "The username (localpart) of the user to get", + "required": true, + "schema": { + "description": "The username (localpart) of the user to get", + "type": "string" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/users/by-username/alice" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User with username \"alice\" not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/set-admin": { + "post": { + "tags": [ + "user" + ], + "summary": "Set whether a user can request admin", + "description": "Calling this endpoint will not have any effect on existing sessions, meaning that their existing sessions will keep admin access if they were granted it.", + "operationId": "userSetAdmin", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserSetAdminRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "User had admin privileges set", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + 
"example": { + "data": { + "type": "user", + "id": "02081040G2081040G2081040G2", + "attributes": { + "username": "bob", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": true, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/02081040G2081040G2081040G2" + } + }, + "links": { + "self": "/api/admin/v1/users/02081040G2081040G2081040G2/set-admin" + } + } + } + } + }, + "404": { + "description": "User ID not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/deactivate": { + "post": { + "tags": [ + "user" + ], + "summary": "Deactivate a user", + "description": "Calling this endpoint will deactivate the user, preventing them from doing any action.\nThis invalidates any existing session, and will ask the homeserver to make them leave all rooms.", + "operationId": "deactivateUser", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeactivateUserRequest" + } + } + } + }, + "responses": { + "200": { + "description": "User was deactivated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "username": "charlie", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": "1970-01-01T00:00:00Z", + "deactivated_at": null, + "admin": false, + "legacy_guest": true + }, + "links": { + "self": "/api/admin/v1/users/030C1G60R30C1G60R30C1G60R3" + } + }, + "links": { + "self": 
"/api/admin/v1/users/030C1G60R30C1G60R30C1G60R3/deactivate" + } + } + } + } + }, + "404": { + "description": "User ID not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/reactivate": { + "post": { + "tags": [ + "user" + ], + "summary": "Reactivate a user", + "description": "Calling this endpoint will reactivate a deactivated user.\nThis DOES NOT unlock a locked user, which is still prevented from doing any action until it is explicitly unlocked.", + "operationId": "reactivateUser", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User was reactivated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081/reactivate" + } + } + } + } + }, + "404": { + "description": "User ID not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/lock": { + "post": { + "tags": [ + "user" + ], + "summary": "Lock a user", + "description": "Calling this endpoint will lock the user, preventing them from 
doing any action.\nThis DOES NOT invalidate any existing session, meaning that all their existing sessions will work again as soon as they get unlocked.", + "operationId": "lockUser", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User was locked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "username": "charlie", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": "1970-01-01T00:00:00Z", + "deactivated_at": null, + "admin": false, + "legacy_guest": true + }, + "links": { + "self": "/api/admin/v1/users/030C1G60R30C1G60R30C1G60R3" + } + }, + "links": { + "self": "/api/admin/v1/users/030C1G60R30C1G60R30C1G60R3/lock" + } + } + } + } + }, + "404": { + "description": "User ID not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/users/{id}/unlock": { + "post": { + "tags": [ + "user" + ], + "summary": "Unlock a user", + "description": "Calling this endpoint will lift restrictions on user actions that had imposed by locking.\nThis DOES NOT reactivate a deactivated user, which will remain unavailable until it is explicitly reactivated.", + "operationId": "unlockUser", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User was unlocked", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/SingleResponse_for_User" + }, + "example": { + "data": { + "type": "user", + "id": "01040G2081040G2081040G2081", + "attributes": { + "username": "alice", + "created_at": "1970-01-01T00:00:00Z", + "locked_at": null, + "deactivated_at": null, + "admin": false, + "legacy_guest": false + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/users/01040G2081040G2081040G2081/unlock" + } + } + } + } + }, + "404": { + "description": "User ID not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-emails": { + "get": { + "tags": [ + "user-email" + ], + "summary": "List user emails", + "description": "Retrieve a list of user emails.", + "operationId": "listUserEmails", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + 
"integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user]", + "description": "Retrieve the items for the given user", + "schema": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[email]", + "description": "Retrieve the user email with the given email address", + "schema": { + "description": "Retrieve the user email with the given email address", + "type": [ + "string", + "null" + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of user emails", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_UserEmail" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "user-email", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "user_id": "02081040G2081040G2081040G2", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-emails?page[first]=1", + "first": "/api/admin/v1/user-emails?page[first]=1", + "last": "/api/admin/v1/user-emails?page[last]=1", + "next": "/api/admin/v1/user-emails?page[after]=01040G2081040G2081040G2081&page[first]=1" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "post": { + "tags": [ + "user-email" + ], + "summary": "Add a user email", + "description": "Add an email address to a user.\nNote that this endpoint ignores any policy which would normally prevent the email from being added.", + "operationId": "addUserEmail", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserEmailRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "User email was created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserEmail" + }, + "example": { + "data": { + "type": "user-email", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "user_id": "02081040G2081040G2081040G2", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-emails/01040G2081040G2081040G2081" + } + } + } + } + }, + "409": { + "description": "Email already in use", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User email \"alice@example.com\" already in use" + } + ] + } + } + } + }, + "400": { + "description": "Email is not valid", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Email \"not a valid email\" is not valid" + }, + { + "title": "Missing domain or user" + } + ] + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + 
"title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-emails/{id}": { + "get": { + "tags": [ + "user-email" + ], + "summary": "Get a user email", + "operationId": "getUserEmail", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User email was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserEmail" + }, + "example": { + "data": { + "type": "user-email", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "user_id": "02081040G2081040G2081040G2", + "email": "alice@example.com" + }, + "links": { + "self": "/api/admin/v1/user-emails/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-emails/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "User email was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User email ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "delete": { + "tags": [ + "user-email" + ], + "summary": "Delete a user email", + "operationId": "deleteUserEmail", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "204": { + "description": "User email was found" + }, + "404": { + "description": "User email was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User email ID 00000000000000000000000000 not found" + } + ] + } + } + } + } 
+ } + } + }, + "/api/admin/v1/user-sessions": { + "get": { + "tags": [ + "user-session" + ], + "summary": "List user sessions", + "description": "Retrieve a list of user sessions (browser sessions).\nNote that by default, all sessions, including finished ones are returned, with the oldest first.\nUse the `filter[status]` parameter to filter the sessions by their status and `page[last]` parameter to retrieve the last N sessions.", + "operationId": "listUserSessions", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user]", + "description": "Retrieve the items for the given user", + "schema": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[status]", + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "schema": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/UserSessionStatus" + }, + { + "type": "null" + } + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of user sessions", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_UserSession" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "user-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": "02081040G2081040G2081040G2", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1" + }, + "links": { + "self": "/api/admin/v1/user-sessions/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "user-session", + "id": "02081040G2081040G2081040G2", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": 
"030C1G60R30C1G60R30C1G60R3", + "user_agent": null, + "last_active_at": null, + "last_active_ip": null + }, + "links": { + "self": "/api/admin/v1/user-sessions/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "user-session", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": "1970-01-01T00:00:00Z", + "user_id": "040G2081040G2081040G208104", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1" + }, + "links": { + "self": "/api/admin/v1/user-sessions/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-sessions?page[first]=3", + "first": "/api/admin/v1/user-sessions?page[first]=3", + "last": "/api/admin/v1/user-sessions?page[last]=3", + "next": "/api/admin/v1/user-sessions?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + }, + "404": { + "description": "User was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-sessions/{id}": { + "get": { + "tags": [ + "user-session" + ], + "summary": "Get a user session", + "operationId": "getUserSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User session was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserSession" + }, + "example": { + "data": { + "type": "user-session", + "id": "01040G2081040G2081040G2081", + "attributes": { + 
"created_at": "1970-01-01T00:00:00Z", + "finished_at": null, + "user_id": "02081040G2081040G2081040G2", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1" + }, + "links": { + "self": "/api/admin/v1/user-sessions/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-sessions/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "User session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User session ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-sessions/{id}/finish": { + "post": { + "tags": [ + "user-session" + ], + "summary": "Finish a user session", + "description": "Calling this endpoint will finish the user session, preventing any further use.", + "operationId": "finishUserSession", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "User session was finished", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserSession" + }, + "example": { + "data": { + "type": "user-session", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "finished_at": "1970-01-01T00:00:00Z", + "user_id": "040G2081040G2081040G208104", + "user_agent": "Mozilla/5.0", + "last_active_at": "1970-01-01T00:00:00Z", + "last_active_ip": "127.0.0.1" + }, + "links": { + "self": "/api/admin/v1/user-sessions/030C1G60R30C1G60R30C1G60R3" + } + }, + "links": { + "self": "/api/admin/v1/user-sessions/030C1G60R30C1G60R30C1G60R3/finish" + } + } + } + } + }, + "400": { + "description": "Session is already finished", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User session with ID 00000000000000000000000000 is already finished" + } + ] + } + } + } + }, + "404": { + "description": "User session was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User session with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-registration-tokens": { + "get": { + "tags": [ + "user-registration-token" + ], + "summary": "List user registration tokens", + "operationId": "listUserRegistrationTokens", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. 
Defaults to `true`.", + "schema": { + "description": "Include the total number of items. Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[used]", + "description": "Retrieve tokens that have (or have not) been used at least once", + "schema": { + "description": "Retrieve tokens that have (or have not) been used at least once", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[revoked]", + "description": "Retrieve tokens that are (or are not) revoked", + "schema": { + "description": "Retrieve tokens that are (or are not) revoked", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[expired]", + "description": "Retrieve tokens that are (or are not) expired", + "schema": { + "description": "Retrieve tokens that are (or are not) expired", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[valid]", + "description": "Retrieve tokens that are (or are not) valid\n\n Valid means that the token has not expired, is not revoked, and has not\n reached its usage limit.", + "schema": { + "description": "Retrieve tokens that are (or are not) valid\n\n Valid means that the token has not expired, is not revoked, and has not\n reached its usage limit.", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of registration tokens", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_UserRegistrationToken" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "user-registration_token", + "id": "01040G2081040G2081040G2081", + "attributes": { + "token": "abc123def456", + "valid": true, + "usage_limit": 10, + "times_used": 5, + 
"created_at": "1970-01-01T00:00:00Z", + "last_used_at": "1970-01-01T00:00:00Z", + "expires_at": "1970-01-31T00:00:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "user-registration_token", + "id": "02081040G2081040G2081040G2", + "attributes": { + "token": "xyz789abc012", + "valid": false, + "usage_limit": null, + "times_used": 0, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "1970-01-01T00:00:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + } + ], + "links": { + "self": "/api/admin/v1/user-registration-tokens?page[first]=2", + "first": "/api/admin/v1/user-registration-tokens?page[first]=2", + "last": "/api/admin/v1/user-registration-tokens?page[last]=2", + "next": "/api/admin/v1/user-registration-tokens?page[after]=02081040G2081040G2081040G2&page[first]=2" + } + } + } + } + } + } + }, + "post": { + "tags": [ + "user-registration-token" + ], + "summary": "Create a new user registration token", + "operationId": "addUserRegistrationToken", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserRegistrationTokenRequest" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "A new user registration token was created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserRegistrationToken" + }, + "example": { + "data": { + "type": "user-registration_token", + "id": "01040G2081040G2081040G2081", + "attributes": { + "token": "abc123def456", + "valid": true, + "usage_limit": 10, + "times_used": 5, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": "1970-01-01T00:00:00Z", + 
"expires_at": "1970-01-31T00:00:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + } + } + } + } + } + } + }, + "/api/admin/v1/user-registration-tokens/{id}": { + "get": { + "tags": [ + "user-registration-token" + ], + "summary": "Get a user registration token", + "operationId": "getUserRegistrationToken", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Registration token was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserRegistrationToken" + }, + "example": { + "data": { + "type": "user-registration_token", + "id": "01040G2081040G2081040G2081", + "attributes": { + "token": "abc123def456", + "valid": true, + "usage_limit": 10, + "times_used": 5, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": "1970-01-01T00:00:00Z", + "expires_at": "1970-01-31T00:00:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Registration token was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "put": { + "tags": [ + "user-registration-token" + ], + "summary": "Update a user registration token", + "description": "Update properties of a user registration token such as expiration and usage limit. 
To set a field to null (removing the limit/expiration), include the field with a null value. To leave a field unchanged, omit it from the request body.", + "operationId": "updateUserRegistrationToken", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EditUserRegistrationTokenRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Registration token was updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserRegistrationToken" + }, + "example": { + "data": { + "type": "user-registration_token", + "id": "01040G2081040G2081040G2081", + "attributes": { + "token": "abc123def456", + "valid": true, + "usage_limit": 10, + "times_used": 5, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": "1970-01-01T00:00:00Z", + "expires_at": "1970-01-31T00:00:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Registration token was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-registration-tokens/{id}/revoke": { + "post": { + "tags": [ + "user-registration-token" + ], + "summary": "Revoke a user registration token", + "description": "Calling this endpoint will revoke the user registration token, preventing it from being used for new registrations.", + "operationId": 
"revokeUserRegistrationToken", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Registration token was revoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserRegistrationToken" + }, + "example": { + "data": { + "type": "user-registration_token", + "id": "02081040G2081040G2081040G2", + "attributes": { + "token": "xyz789abc012", + "valid": false, + "usage_limit": null, + "times_used": 0, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": null, + "expires_at": null, + "revoked_at": "1970-01-01T00:00:00Z" + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/02081040G2081040G2081040G2" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/02081040G2081040G2081040G2/revoke" + } + } + } + } + }, + "400": { + "description": "Token is already revoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 is already revoked" + } + ] + } + } + } + }, + "404": { + "description": "Registration token was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/user-registration-tokens/{id}/unrevoke": { + "post": { + "tags": [ + "user-registration-token" + ], + "summary": "Unrevoke a user registration token", + "description": "Calling this endpoint will unrevoke a previously revoked user registration token, allowing it to be used for registrations again (subject to its usage limits and expiration).", + "operationId": 
"unrevokeUserRegistrationToken", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Registration token was unrevoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UserRegistrationToken" + }, + "example": { + "data": { + "type": "user-registration_token", + "id": "01040G2081040G2081040G2081", + "attributes": { + "token": "abc123def456", + "valid": true, + "usage_limit": 10, + "times_used": 5, + "created_at": "1970-01-01T00:00:00Z", + "last_used_at": "1970-01-01T00:00:00Z", + "expires_at": "1970-01-31T00:00:00Z", + "revoked_at": null + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/user-registration-tokens/01040G2081040G2081040G2081/unrevoke" + } + } + } + } + }, + "400": { + "description": "Token is not revoked", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 is not revoked" + } + ] + } + } + } + }, + "404": { + "description": "Registration token was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Registration token with ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/upstream-oauth-links": { + "get": { + "tags": [ + "upstream-oauth-link" + ], + "summary": "List upstream OAuth 2.0 links", + "description": "Retrieve a list of upstream OAuth 2.0 links.", + "operationId": "listUpstreamOAuthLinks", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given 
ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[user]", + "description": "Retrieve the items for the given user", + "schema": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[provider]", + "description": "Retrieve the items for the given provider", + "schema": { + "description": "Retrieve the items for the given provider", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[subject]", + "description": "Retrieve the items with the given subject", + "schema": { + "description": "Retrieve the items with the given subject", + "type": [ + "string", + "null" + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of upstream OAuth 2.0 links", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_UpstreamOAuthLink" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "upstream-oauth-link", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "provider_id": "02081040G2081040G2081040G2", + "subject": "john-42", + "user_id": "030C1G60R30C1G60R30C1G60R3", + "human_account_name": "john.doe@example.com" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "02081040G2081040G2081040G2", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "provider_id": "030C1G60R30C1G60R30C1G60R3", + "subject": "jane-123", + "user_id": null, + 
"human_account_name": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "upstream-oauth-link", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "provider_id": "040G2081040G2081040G208104", + "subject": "bob@social.example.com", + "user_id": "050M2GA1850M2GA1850M2GA185", + "human_account_name": "bob" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" + } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-links?page[first]=3", + "first": "/api/admin/v1/upstream-oauth-links?page[first]=3", + "last": "/api/admin/v1/upstream-oauth-links?page[last]=3", + "next": "/api/admin/v1/upstream-oauth-links?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + }, + "404": { + "description": "User or provider was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "post": { + "tags": [ + "upstream-oauth-link" + ], + "summary": "Add an upstream OAuth 2.0 link", + "operationId": "addUpstreamOAuthLink", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUpstreamOauthLinkRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "An existing Upstream OAuth 2.0 link was associated to a user", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UpstreamOAuthLink" + }, + "example": { + "data": { + "type": "upstream-oauth-link", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + 
"provider_id": "02081040G2081040G2081040G2", + "subject": "john-42", + "user_id": "030C1G60R30C1G60R30C1G60R3", + "human_account_name": "john.doe@example.com" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + } + } + } + }, + "201": { + "description": "A new Upstream OAuth 2.0 link was created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UpstreamOAuthLink" + }, + "example": { + "data": { + "type": "upstream-oauth-link", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "provider_id": "02081040G2081040G2081040G2", + "subject": "john-42", + "user_id": "030C1G60R30C1G60R30C1G60R3", + "human_account_name": "john.doe@example.com" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + } + } + } + }, + "409": { + "description": "The subject from the provider is already linked to another user", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Upstream Oauth 2.0 Provider ID 01040G2081040G2081040G2081 with subject subject1 is already linked to a user" + } + ] + } + } + } + }, + "404": { + "description": "User or provider was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "User ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + "/api/admin/v1/upstream-oauth-links/{id}": { + "get": { + "tags": [ + "upstream-oauth-link" + ], + "summary": "Get an upstream OAuth 2.0 link", + "operationId": "getUpstreamOAuthLink", + "parameters": [ + { + "in": "path", + 
"name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "Upstream OAuth 2.0 link was found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UpstreamOAuthLink" + }, + "example": { + "data": { + "type": "upstream-oauth-link", + "id": "01040G2081040G2081040G2081", + "attributes": { + "created_at": "1970-01-01T00:00:00Z", + "provider_id": "02081040G2081040G2081040G2", + "subject": "john-42", + "user_id": "030C1G60R30C1G60R30C1G60R3", + "human_account_name": "john.doe@example.com" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-links/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Upstream OAuth 2.0 link was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Upstream OAuth 2.0 Link ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + }, + "delete": { + "tags": [ + "upstream-oauth-link" + ], + "summary": "Delete an upstream OAuth 2.0 link", + "operationId": "deleteUpstreamOAuthLink", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "204": { + "description": "Upstream OAuth 2.0 link was deleted" + }, + "404": { + "description": "Upstream OAuth 2.0 link was not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + }, + "example": { + "errors": [ + { + "title": "Upstream OAuth 2.0 Link ID 00000000000000000000000000 not found" + } + ] + } + } + } + } + } + } + }, + 
"/api/admin/v1/upstream-oauth-providers": { + "get": { + "tags": [ + "upstream-oauth-provider" + ], + "summary": "List upstream OAuth 2.0 providers", + "operationId": "listUpstreamOAuthProviders", + "parameters": [ + { + "in": "query", + "name": "page[before]", + "description": "Retrieve the items before the given ID", + "schema": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[after]", + "description": "Retrieve the items after the given ID", + "schema": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "page[first]", + "description": "Retrieve the first N items", + "schema": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "page[last]", + "description": "Retrieve the last N items", + "schema": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "style": "form" + }, + { + "in": "query", + "name": "count", + "description": "Include the total number of items. Defaults to `true`.", + "schema": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + }, + "style": "form" + }, + { + "in": "query", + "name": "filter[enabled]", + "description": "Retrieve providers that are (or are not) enabled", + "schema": { + "description": "Retrieve providers that are (or are not) enabled", + "type": [ + "boolean", + "null" + ] + }, + "style": "form" + } + ], + "responses": { + "200": { + "description": "Paginated response of upstream OAuth 2.0 providers", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PaginatedResponse_for_UpstreamOAuthProvider" + }, + "example": { + "meta": { + "count": 42 + }, + "data": [ + { + "type": "upstream-oauth-provider", + "id": "01040G2081040G2081040G2081", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "1970-01-01T00:00:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01040G2081040G2081040G2081" + }, + "meta": { + "page": { + "cursor": "01040G2081040G2081040G2081" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "02081040G2081040G2081040G2", + "attributes": { + "issuer": "https://appleid.apple.com", + "human_name": "Apple ID", + "brand_name": "apple", + "created_at": "1970-01-01T00:00:00Z", + "disabled_at": "1970-01-01T00:00:00Z" + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/02081040G2081040G2081040G2" + }, + "meta": { + "page": { + "cursor": "02081040G2081040G2081040G2" + } + } + }, + { + "type": "upstream-oauth-provider", + "id": "030C1G60R30C1G60R30C1G60R3", + "attributes": { + "issuer": null, + "human_name": "Custom OAuth Provider", + "brand_name": null, + "created_at": "1970-01-01T00:00:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/030C1G60R30C1G60R30C1G60R3" + }, + "meta": { + "page": { + "cursor": "030C1G60R30C1G60R30C1G60R3" 
+ } + } + } + ], + "links": { + "self": "/api/admin/v1/upstream-oauth-providers?page[first]=3", + "first": "/api/admin/v1/upstream-oauth-providers?page[first]=3", + "last": "/api/admin/v1/upstream-oauth-providers?page[last]=3", + "next": "/api/admin/v1/upstream-oauth-providers?page[after]=030C1G60R30C1G60R30C1G60R3&page[first]=3" + } + } + } + } + } + } + } + }, + "/api/admin/v1/upstream-oauth-providers/{id}": { + "get": { + "tags": [ + "upstream-oauth-provider" + ], + "summary": "Get upstream OAuth provider", + "operationId": "getUpstreamOAuthProvider", + "parameters": [ + { + "in": "path", + "name": "id", + "required": true, + "schema": { + "title": "The ID of the resource", + "$ref": "#/components/schemas/ULID" + }, + "style": "simple" + } + ], + "responses": { + "200": { + "description": "The upstream OAuth provider", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SingleResponse_for_UpstreamOAuthProvider" + }, + "example": { + "data": { + "type": "upstream-oauth-provider", + "id": "01040G2081040G2081040G2081", + "attributes": { + "issuer": "https://accounts.google.com", + "human_name": "Google", + "brand_name": "google", + "created_at": "1970-01-01T00:00:00Z", + "disabled_at": null + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01040G2081040G2081040G2081" + } + }, + "links": { + "self": "/api/admin/v1/upstream-oauth-providers/01040G2081040G2081040G2081" + } + } + } + } + }, + "404": { + "description": "Provider not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + } + }, + "components": { + "securitySchemes": { + "oauth2": { + "type": "oauth2", + "flows": { + "clientCredentials": { + "refreshUrl": "./oauth2/token", + "tokenUrl": "./oauth2/token", + "scopes": { + "urn:mas:admin": "Grant access to the admin API" + } + }, + "authorizationCode": { + "authorizationUrl": "./authorize", + "tokenUrl": "./oauth2/token", + 
"refreshUrl": "./oauth2/token", + "scopes": { + "urn:mas:admin": "Grant access to the admin API" + } + } + } + }, + "token": { + "type": "http", + "scheme": "bearer", + "description": "An access token with access to the admin API" + } + }, + "schemas": { + "SiteConfig": { + "type": "object", + "properties": { + "server_name": { + "description": "The Matrix server name for which this instance is configured", + "type": "string" + }, + "password_login_enabled": { + "description": "Whether password login is enabled.", + "type": "boolean" + }, + "password_registration_enabled": { + "description": "Whether password registration is enabled.", + "type": "boolean" + }, + "password_registration_email_required": { + "description": "Whether a valid email address is required for password registrations.", + "type": "boolean" + }, + "registration_token_required": { + "description": "Whether registration tokens are required for password registrations.", + "type": "boolean" + }, + "email_change_allowed": { + "description": "Whether users can change their email.", + "type": "boolean" + }, + "displayname_change_allowed": { + "description": "Whether users can change their display name.", + "type": "boolean" + }, + "password_change_allowed": { + "description": "Whether users can change their password.", + "type": "boolean" + }, + "account_recovery_allowed": { + "description": "Whether users can recover their account via email.", + "type": "boolean" + }, + "account_deactivation_allowed": { + "description": "Whether users can delete their own account.", + "type": "boolean" + }, + "captcha_enabled": { + "description": "Whether CAPTCHA during registration is enabled.", + "type": "boolean" + }, + "minimum_password_complexity": { + "description": "Minimum password complexity, between 0 and 4.\n This is a score from zxcvbn.", + "type": "integer", + "format": "uint8", + "minimum": 0, + "maximum": 4 + } + }, + "required": [ + "server_name", + "password_login_enabled", + 
"password_registration_enabled", + "password_registration_email_required", + "registration_token_required", + "email_change_allowed", + "displayname_change_allowed", + "password_change_allowed", + "account_recovery_allowed", + "account_deactivation_allowed", + "captcha_enabled", + "minimum_password_complexity" + ] + }, + "Version": { + "type": "object", + "properties": { + "version": { + "description": "The semver version of the app", + "type": "string" + } + }, + "required": [ + "version" + ] + }, + "PaginationParams": { + "type": "object", + "properties": { + "page[before]": { + "description": "Retrieve the items before the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "page[after]": { + "description": "Retrieve the items after the given ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "page[first]": { + "description": "Retrieve the first N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "page[last]": { + "description": "Retrieve the last N items", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 1 + }, + "count": { + "description": "Include the total number of items. 
Defaults to `true`.", + "anyOf": [ + { + "$ref": "#/components/schemas/IncludeCount" + }, + { + "type": "null" + } + ] + } + } + }, + "ULID": { + "title": "ULID", + "description": "A ULID as per https://github.com/ulid/spec", + "type": "string", + "pattern": "^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$", + "example": "01ARZ3NDEKTSV4RRFFQ69G5FAV" + }, + "IncludeCount": { + "oneOf": [ + { + "description": "Include the total number of items (default)", + "type": "string", + "enum": [ + "true" + ] + }, + { + "description": "Do not include the total number of items", + "type": "string", + "enum": [ + "false" + ] + }, + { + "description": "Only include the total number of items, skip the items themselves", + "type": "string", + "enum": [ + "only" + ] + } + ] + }, + "CompatSessionFilter": { + "type": "object", + "properties": { + "filter[user]": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[user-session]": { + "description": "Retrieve the items started from the given browser session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[status]": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/CompatSessionStatus" + }, + { + "type": "null" + } + ] + } + } + }, + "CompatSessionStatus": { + "type": "string", + "enum": [ + "active", + "finished" + ] + }, + "PaginatedResponse_for_CompatSession": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of 
resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_CompatSession" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "PaginationMeta": { + "type": "object", + "properties": { + "count": { + "description": "The total number of results", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0 + } + } + }, + "SingleResource_for_CompatSession": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/CompatSession" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "CompatSession": { + "description": "A compatibility session for legacy clients", + "type": "object", + "properties": { + "user_id": { + "description": "The ID of the user that owns this session", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "device_id": { + "description": "The Matrix device ID of this session", + "allOf": [ + { + "$ref": "#/components/schemas/DeviceID" + } + ] + }, + "user_session_id": { + "description": "The ID of the user session that started this session, if any", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "redirect_uri": { + 
"description": "The redirect URI used to login in the client, if it was an SSO login", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "created_at": { + "description": "The time this session was created", + "type": "string", + "format": "date-time" + }, + "user_agent": { + "description": "The user agent string that started this session, if any", + "type": [ + "string", + "null" + ] + }, + "last_active_at": { + "description": "The time this session was last active", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_active_ip": { + "description": "The last IP address recorded for this session", + "type": [ + "string", + "null" + ], + "format": "ip" + }, + "finished_at": { + "description": "The time this session was finished", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "human_name": { + "description": "The user-provided name, if any", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "user_id", + "device_id", + "user_session_id", + "created_at" + ] + }, + "DeviceID": { + "title": "Device ID", + "description": "A device ID as per https://matrix.org/docs/spec/client_server/r0.6.0#device-ids", + "type": "string", + "pattern": "^[A-Za-z0-9._~!$&'()*+,;=:&/-]+$", + "example": "AABBCCDDEE" + }, + "SelfLinks": { + "description": "Related links", + "type": "object", + "properties": { + "self": { + "description": "The canonical link to the current resource", + "type": "string" + } + }, + "required": [ + "self" + ] + }, + "SingleResourceMeta": { + "description": "Metadata associated with a resource", + "type": "object", + "properties": { + "page": { + "description": "Information about the pagination of the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMetaPage" + }, + { + "type": "null" + } + ] + } + } + }, + "SingleResourceMetaPage": { + "description": "Pagination metadata for a resource", + "type": "object", + "properties": { + "cursor": { + "description": "The cursor 
of this resource in the paginated result", + "type": "string" + } + }, + "required": [ + "cursor" + ] + }, + "PaginationLinks": { + "description": "Related links", + "type": "object", + "properties": { + "self": { + "description": "The canonical link to the current page", + "type": "string" + }, + "first": { + "description": "The link to the first page of results", + "type": [ + "string", + "null" + ] + }, + "last": { + "description": "The link to the last page of results", + "type": [ + "string", + "null" + ] + }, + "next": { + "description": "The link to the next page of results\n\n Only present if there is a next page", + "type": [ + "string", + "null" + ] + }, + "prev": { + "description": "The link to the previous page of results\n\n Only present if there is a previous page", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "self" + ] + }, + "ErrorResponse": { + "description": "A top-level response with a list of errors", + "type": "object", + "properties": { + "errors": { + "description": "The list of errors", + "type": "array", + "items": { + "$ref": "#/components/schemas/Error" + } + } + }, + "required": [ + "errors" + ] + }, + "Error": { + "description": "A single error", + "type": "object", + "properties": { + "title": { + "description": "A human-readable title for the error", + "type": "string" + } + }, + "required": [ + "title" + ] + }, + "UlidInPath": { + "type": "object", + "properties": { + "id": { + "title": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + } + }, + "required": [ + "id" + ] + }, + "SingleResponse_for_CompatSession": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_CompatSession" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "OAuth2SessionFilter": { + "type": "object", + "properties": { + 
"filter[user]": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[client]": { + "description": "Retrieve the items for the given client", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[client-kind]": { + "description": "Retrieve the items only for a specific client kind", + "anyOf": [ + { + "$ref": "#/components/schemas/OAuth2ClientKind" + }, + { + "type": "null" + } + ] + }, + "filter[user-session]": { + "description": "Retrieve the items started from the given browser session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[scope]": { + "description": "Retrieve the items with the given scope", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "filter[status]": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/OAuth2SessionStatus" + }, + { + "type": "null" + } + ] + } + } + }, + "OAuth2ClientKind": { + "type": "string", + "enum": [ + "dynamic", + "static" + ] + }, + "OAuth2SessionStatus": { + "type": "string", + "enum": [ + "active", + "finished" + ] + }, + "PaginatedResponse_for_OAuth2Session": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_OAuth2Session" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + 
"$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_OAuth2Session": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/OAuth2Session" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "OAuth2Session": { + "description": "A OAuth 2.0 session", + "type": "object", + "properties": { + "created_at": { + "description": "When the object was created", + "type": "string", + "format": "date-time" + }, + "finished_at": { + "description": "When the session was finished", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "user_id": { + "description": "The ID of the user who owns the session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "user_session_id": { + "description": "The ID of the browser session which started this session", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "client_id": { + "description": "The ID of the client which requested this session", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "scope": { + "description": "The scope granted for this session", + "type": "string" + }, + "user_agent": { + "description": "The user agent string of the 
client which started this session", + "type": [ + "string", + "null" + ] + }, + "last_active_at": { + "description": "The last time the session was active", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_active_ip": { + "description": "The last IP address used by the session", + "type": [ + "string", + "null" + ], + "format": "ip" + }, + "human_name": { + "description": "The user-provided name, if any", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "created_at", + "client_id", + "scope" + ] + }, + "SingleResponse_for_OAuth2Session": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_OAuth2Session" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "PersonalSessionFilter": { + "type": "object", + "properties": { + "filter[owner_user]": { + "description": "Filter by owner user ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[owner_client]": { + "description": "Filter by owner `OAuth2` client ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[actor_user]": { + "description": "Filter by actor user ID", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[scope]": { + "description": "Retrieve the items with the given scope", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "filter[status]": { + "description": "Filter by session status", + "anyOf": [ + { + "$ref": "#/components/schemas/PersonalSessionStatus" + }, + { + "type": "null" + } + ] + }, + "filter[expires_before]": { + "description": "Filter by access token expiry date", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "filter[expires_after]": { + "description": "Filter by 
access token expiry date", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "filter[expires]": { + "description": "Filter by whether the access token has an expiry time", + "type": [ + "boolean", + "null" + ] + } + } + }, + "PersonalSessionStatus": { + "type": "string", + "enum": [ + "active", + "revoked" + ] + }, + "PaginatedResponse_for_PersonalSession": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_PersonalSession" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_PersonalSession": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/PersonalSession" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "PersonalSession": { + "description": "A personal session (session using personal access tokens)", + "type": "object", + "properties": 
{ + "created_at": { + "description": "When the session was created", + "type": "string", + "format": "date-time" + }, + "revoked_at": { + "description": "When the session was revoked, if applicable", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "owner_user_id": { + "description": "The ID of the user who owns this session (if user-owned)", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "owner_client_id": { + "description": "The ID of the `OAuth2` client that owns this session (if client-owned)", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "actor_user_id": { + "description": "The ID of the user that the session acts on behalf of", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "human_name": { + "description": "Human-readable name for the session", + "type": "string" + }, + "scope": { + "description": "`OAuth2` scopes for this session", + "type": "string" + }, + "last_active_at": { + "description": "When the session was last active", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_active_ip": { + "description": "IP address of last activity", + "type": [ + "string", + "null" + ], + "format": "ip" + }, + "expires_at": { + "description": "When the current token for this session expires.\n The session will need to be regenerated, producing a new access token,\n after this time.\n None if the current token won't expire or if the session is revoked.", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "access_token": { + "description": "The actual access token (only returned on creation)", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "created_at", + "actor_user_id", + "human_name", + "scope" + ] + }, + "CreatePersonalSessionRequest": { + "title": "JSON payload for the `POST /api/admin/v1/personal-sessions` endpoint", + "type": "object", + "properties": { + 
"actor_user_id": { + "description": "The user this session will act on behalf of", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "human_name": { + "description": "Human-readable name for the session", + "type": "string" + }, + "scope": { + "description": "`OAuth2` scopes for this session", + "type": "string" + }, + "expires_in": { + "description": "Token expiry time in seconds.\n If not set, the token won't expire.", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0 + } + }, + "required": [ + "actor_user_id", + "human_name", + "scope" + ] + }, + "SingleResponse_for_PersonalSession": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_PersonalSession" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "RegeneratePersonalSessionRequest": { + "title": "JSON payload for the `POST /api/admin/v1/personal-sessions/{id}/regenerate` endpoint", + "type": "object", + "properties": { + "expires_in": { + "description": "Token expiry time in seconds.\n If not set, the token won't expire.", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0 + } + } + }, + "SetPolicyDataRequest": { + "title": "JSON payload for the `POST /api/admin/v1/policy-data`", + "type": "object", + "properties": { + "data": { + "example": { + "hello": "world", + "foo": 42, + "bar": true + } + } + }, + "required": [ + "data" + ] + }, + "SingleResponse_for_PolicyData": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_PolicyData" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "SingleResource_for_PolicyData": { + "description": "A single resource, with its type, ID, attributes 
and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/PolicyData" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "PolicyData": { + "description": "The policy data", + "type": "object", + "properties": { + "created_at": { + "description": "The creation date of the policy data", + "type": "string", + "format": "date-time" + }, + "data": { + "description": "The policy data content" + } + }, + "required": [ + "created_at", + "data" + ] + }, + "UserFilter": { + "type": "object", + "properties": { + "filter[admin]": { + "description": "Retrieve users with (or without) the `admin` flag set", + "type": [ + "boolean", + "null" + ] + }, + "filter[legacy-guest]": { + "description": "Retrieve users with (or without) the `legacy_guest` flag set", + "type": [ + "boolean", + "null" + ] + }, + "filter[search]": { + "description": "Retrieve users where the username matches contains the given string\n\n Note that this doesn't change the ordering of the result, which are\n still ordered by ID.", + "type": [ + "string", + "null" + ] + }, + "filter[status]": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all users, including locked ones.\n\n * `active`: Only retrieve active users\n\n * `locked`: Only retrieve locked users (includes deactivated users)\n\n * `deactivated`: Only retrieve deactivated users", + 
"anyOf": [ + { + "$ref": "#/components/schemas/UserStatus" + }, + { + "type": "null" + } + ] + } + } + }, + "UserStatus": { + "type": "string", + "enum": [ + "active", + "locked", + "deactivated" + ] + }, + "PaginatedResponse_for_User": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_User" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_User": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/User" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "User": { + "description": "A user", + "type": "object", + "properties": { + "username": { + "description": "The username (localpart) of the user", + "type": "string" + }, + "created_at": { + "description": "When the user was created", + "type": "string", + "format": "date-time" + }, + "locked_at": { + 
"description": "When the user was locked. If null, the user is not locked.", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "deactivated_at": { + "description": "When the user was deactivated. If null, the user is not deactivated.", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "admin": { + "description": "Whether the user can request admin privileges.", + "type": "boolean" + }, + "legacy_guest": { + "description": "Whether the user was a guest before migrating to MAS,", + "type": "boolean" + } + }, + "required": [ + "username", + "created_at", + "admin", + "legacy_guest" + ] + }, + "AddUserRequest": { + "title": "JSON payload for the `POST /api/admin/v1/users` endpoint", + "type": "object", + "properties": { + "username": { + "description": "The username of the user to add.", + "type": "string" + }, + "skip_homeserver_check": { + "description": "Skip checking with the homeserver whether the username is available.\n\n Use this with caution! The main reason to use this, is when a user used\n by an application service needs to exist in MAS to craft special\n tokens (like with admin access) for them", + "type": "boolean", + "default": false + } + }, + "required": [ + "username" + ] + }, + "SingleResponse_for_User": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_User" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "SetUserPasswordRequest": { + "title": "JSON payload for the `POST /api/admin/v1/users/:id/set-password` endpoint", + "type": "object", + "properties": { + "password": { + "description": "The password to set for the user", + "type": "string", + "example": "hunter2" + }, + "skip_password_check": { + "description": "Skip the password complexity check", + "type": [ + "boolean", + "null" + ] + } + }, + "required": [ + "password" + ] 
+ }, + "UsernamePathParam": { + "type": "object", + "properties": { + "username": { + "description": "The username (localpart) of the user to get", + "type": "string" + } + }, + "required": [ + "username" + ] + }, + "UserSetAdminRequest": { + "title": "JSON payload for the `POST /api/admin/v1/users/:id/set-admin` endpoint", + "type": "object", + "properties": { + "admin": { + "description": "Whether the user can request admin privileges.", + "type": "boolean" + } + }, + "required": [ + "admin" + ] + }, + "DeactivateUserRequest": { + "title": "JSON payload for the `POST /api/admin/v1/users/:id/deactivate` endpoint", + "type": "object", + "properties": { + "skip_erase": { + "description": "Whether to skip requesting the homeserver to GDPR-erase the user upon\n deactivation.", + "type": "boolean", + "default": false + } + } + }, + "UserEmailFilter": { + "type": "object", + "properties": { + "filter[user]": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[email]": { + "description": "Retrieve the user email with the given email address", + "type": [ + "string", + "null" + ] + } + } + }, + "PaginatedResponse_for_UserEmail": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_UserEmail" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_UserEmail": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + 
"properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/UserEmail" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "UserEmail": { + "description": "An email address for a user", + "type": "object", + "properties": { + "created_at": { + "description": "When the object was created", + "type": "string", + "format": "date-time" + }, + "user_id": { + "description": "The ID of the user who owns this email address", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "email": { + "description": "The email address", + "type": "string" + } + }, + "required": [ + "created_at", + "user_id", + "email" + ] + }, + "AddUserEmailRequest": { + "title": "JSON payload for the `POST /api/admin/v1/user-emails`", + "type": "object", + "properties": { + "user_id": { + "description": "The ID of the user to which the email should be added.", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "email": { + "description": "The email address of the user to add.", + "type": "string", + "format": "email" + } + }, + "required": [ + "user_id", + "email" + ] + }, + "SingleResponse_for_UserEmail": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_UserEmail" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", 
+ "links" + ] + }, + "UserSessionFilter": { + "type": "object", + "properties": { + "filter[user]": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[status]": { + "description": "Retrieve the items with the given status\n\n Defaults to retrieve all sessions, including finished ones.\n\n * `active`: Only retrieve active sessions\n\n * `finished`: Only retrieve finished sessions", + "anyOf": [ + { + "$ref": "#/components/schemas/UserSessionStatus" + }, + { + "type": "null" + } + ] + } + } + }, + "UserSessionStatus": { + "type": "string", + "enum": [ + "active", + "finished" + ] + }, + "PaginatedResponse_for_UserSession": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_UserSession" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_UserSession": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/UserSession" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + 
"description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "UserSession": { + "description": "The browser (cookie) session for a user", + "type": "object", + "properties": { + "created_at": { + "description": "When the object was created", + "type": "string", + "format": "date-time" + }, + "finished_at": { + "description": "When the session was finished", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "user_id": { + "description": "The ID of the user who owns the session", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "user_agent": { + "description": "The user agent string of the client which started this session", + "type": [ + "string", + "null" + ] + }, + "last_active_at": { + "description": "The last time the session was active", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "last_active_ip": { + "description": "The last IP address used by the session", + "type": [ + "string", + "null" + ], + "format": "ip" + } + }, + "required": [ + "created_at", + "user_id" + ] + }, + "SingleResponse_for_UserSession": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_UserSession" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "RegistrationTokenFilter": { + "type": "object", + "properties": { + "filter[used]": { + "description": "Retrieve tokens that have (or have not) been used at least once", + "type": [ + "boolean", + "null" + ] + }, + "filter[revoked]": { + "description": "Retrieve tokens that are (or are not) revoked", + "type": [ + "boolean", + "null" + ] + }, + "filter[expired]": { + "description": "Retrieve tokens that are (or are not) expired", + 
"type": [ + "boolean", + "null" + ] + }, + "filter[valid]": { + "description": "Retrieve tokens that are (or are not) valid\n\n Valid means that the token has not expired, is not revoked, and has not\n reached its usage limit.", + "type": [ + "boolean", + "null" + ] + } + } + }, + "PaginatedResponse_for_UserRegistrationToken": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_UserRegistrationToken" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_UserRegistrationToken": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/UserRegistrationToken" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "UserRegistrationToken": { + "description": "A registration token", + "type": "object", + "properties": { + "token": { + "description": "The token string", + "type": 
"string" + }, + "valid": { + "description": "Whether the token is valid", + "type": "boolean" + }, + "usage_limit": { + "description": "Maximum number of times this token can be used", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0 + }, + "times_used": { + "description": "Number of times this token has been used", + "type": "integer", + "format": "uint32", + "minimum": 0 + }, + "created_at": { + "description": "When the token was created", + "type": "string", + "format": "date-time" + }, + "last_used_at": { + "description": "When the token was last used. If null, the token has never been used.", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "expires_at": { + "description": "When the token expires. If null, the token never expires.", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "revoked_at": { + "description": "When the token was revoked. If null, the token is not revoked.", + "type": [ + "string", + "null" + ], + "format": "date-time" + } + }, + "required": [ + "token", + "valid", + "times_used", + "created_at" + ] + }, + "AddUserRegistrationTokenRequest": { + "title": "JSON payload for the `POST /api/admin/v1/user-registration-tokens`", + "type": "object", + "properties": { + "token": { + "description": "The token string. If not provided, a random token will be generated.", + "type": [ + "string", + "null" + ] + }, + "usage_limit": { + "description": "Maximum number of times this token can be used. If not provided, the\n token can be used an unlimited number of times.", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0 + }, + "expires_at": { + "description": "When the token expires. 
If not provided, the token never expires.", + "type": [ + "string", + "null" + ], + "format": "date-time" + } + } + }, + "SingleResponse_for_UserRegistrationToken": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_UserRegistrationToken" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "EditUserRegistrationTokenRequest": { + "title": "JSON payload for the `PUT /api/admin/v1/user-registration-tokens/{id}` endpoint", + "type": "object", + "properties": { + "expires_at": { + "description": "New expiration date for the token, or null to remove expiration", + "type": [ + "string", + "null" + ], + "format": "date-time" + }, + "usage_limit": { + "description": "New usage limit for the token, or null to remove the limit", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0 + } + } + }, + "UpstreamOAuthLinkFilter": { + "type": "object", + "properties": { + "filter[user]": { + "description": "Retrieve the items for the given user", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[provider]": { + "description": "Retrieve the items for the given provider", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "filter[subject]": { + "description": "Retrieve the items with the given subject", + "type": [ + "string", + "null" + ] + } + } + }, + "PaginatedResponse_for_UpstreamOAuthLink": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": 
"#/components/schemas/SingleResource_for_UpstreamOAuthLink" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + "SingleResource_for_UpstreamOAuthLink": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/UpstreamOAuthLink" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "UpstreamOAuthLink": { + "description": "An upstream OAuth 2.0 link", + "type": "object", + "properties": { + "created_at": { + "description": "When the object was created", + "type": "string", + "format": "date-time" + }, + "provider_id": { + "description": "The ID of the provider", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "subject": { + "description": "The subject of the upstream account, unique per provider", + "type": "string" + }, + "user_id": { + "description": "The ID of the user who owns this link, if any", + "anyOf": [ + { + "$ref": "#/components/schemas/ULID" + }, + { + "type": "null" + } + ] + }, + "human_account_name": { + "description": "A human-readable name of the upstream account", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "created_at", + "provider_id", + "subject" + ] + }, + 
"AddUpstreamOauthLinkRequest": { + "title": "JSON payload for the `POST /api/admin/v1/upstream-oauth-links`", + "type": "object", + "properties": { + "user_id": { + "description": "The ID of the user to which the link should be added.", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "provider_id": { + "description": "The ID of the upstream provider to which the link is for.", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "subject": { + "description": "The subject (sub) claim of the user on the provider.", + "type": "string" + }, + "human_account_name": { + "description": "A human readable account name.", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "user_id", + "provider_id", + "subject" + ] + }, + "SingleResponse_for_UpstreamOAuthLink": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_UpstreamOAuthLink" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + }, + "UpstreamOAuthProviderFilter": { + "type": "object", + "properties": { + "filter[enabled]": { + "description": "Retrieve providers that are (or are not) enabled", + "type": [ + "boolean", + "null" + ] + } + } + }, + "PaginatedResponse_for_UpstreamOAuthProvider": { + "description": "A top-level response with a page of resources", + "type": "object", + "properties": { + "meta": { + "description": "Response metadata", + "anyOf": [ + { + "$ref": "#/components/schemas/PaginationMeta" + }, + { + "type": "null" + } + ] + }, + "data": { + "description": "The list of resources", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/components/schemas/SingleResource_for_UpstreamOAuthProvider" + } + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/PaginationLinks" + } + ] + } + }, + "required": [ + "links" + ] + }, + 
"SingleResource_for_UpstreamOAuthProvider": { + "description": "A single resource, with its type, ID, attributes and related links", + "type": "object", + "properties": { + "type": { + "description": "The type of the resource", + "type": "string" + }, + "id": { + "description": "The ID of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/ULID" + } + ] + }, + "attributes": { + "description": "The attributes of the resource", + "allOf": [ + { + "$ref": "#/components/schemas/UpstreamOAuthProvider" + } + ] + }, + "links": { + "description": "Related links", + "allOf": [ + { + "$ref": "#/components/schemas/SelfLinks" + } + ] + }, + "meta": { + "description": "Metadata about the resource", + "anyOf": [ + { + "$ref": "#/components/schemas/SingleResourceMeta" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "type", + "id", + "attributes", + "links" + ] + }, + "UpstreamOAuthProvider": { + "description": "An upstream OAuth 2.0 provider", + "type": "object", + "properties": { + "issuer": { + "description": "The OIDC issuer of the provider", + "type": [ + "string", + "null" + ] + }, + "human_name": { + "description": "A human-readable name for the provider", + "type": [ + "string", + "null" + ] + }, + "brand_name": { + "description": "A brand identifier, e.g. \"apple\" or \"google\"", + "type": [ + "string", + "null" + ] + }, + "created_at": { + "description": "When the provider was created", + "type": "string", + "format": "date-time" + }, + "disabled_at": { + "description": "When the provider was disabled. 
If null, the provider is enabled.", + "type": [ + "string", + "null" + ], + "format": "date-time" + } + }, + "required": [ + "created_at" + ] + }, + "SingleResponse_for_UpstreamOAuthProvider": { + "description": "A top-level response with a single resource", + "type": "object", + "properties": { + "data": { + "$ref": "#/components/schemas/SingleResource_for_UpstreamOAuthProvider" + }, + "links": { + "$ref": "#/components/schemas/SelfLinks" + } + }, + "required": [ + "data", + "links" + ] + } + } + }, + "security": [ + { + "oauth2": [ + "urn:mas:admin" + ] + }, + { + "bearer": [ + "urn:mas:admin" + ] + } + ], + "tags": [ + { + "name": "server", + "description": "Information about the server" + }, + { + "name": "compat-session", + "description": "Manage compatibility sessions from legacy clients" + }, + { + "name": "policy-data", + "description": "Manage the dynamic policy data" + }, + { + "name": "oauth2-session", + "description": "Manage OAuth2 sessions" + }, + { + "name": "user", + "description": "Manage users" + }, + { + "name": "user-email", + "description": "Manage emails associated with users" + }, + { + "name": "user-session", + "description": "Manage browser sessions of users" + }, + { + "name": "user-registration-token", + "description": "Manage user registration tokens" + }, + { + "name": "upstream-oauth-link", + "description": "Manage links between local users and identities from upstream OAuth 2.0 providers" + }, + { + "name": "upstream-oauth-provider", + "description": "Manage upstream OAuth 2.0 providers" + } + ] +} diff --git a/matrix-authentication-service/docs/as-login.md b/matrix-authentication-service/docs/as-login.md new file mode 100644 index 00000000..0eed4f9e --- /dev/null +++ b/matrix-authentication-service/docs/as-login.md @@ -0,0 +1,7 @@ +# About Application Services login + +Encrypted Application Services/Bridges currently leverage the `m.login.application_service` login type to create devices for users. 
+This API is *not* available in the Matrix Authentication Service. + +We're working on a solution to support this use case, but in the meantime, this means **encrypted bridges will not work with the Matrix Authentication Service.** +A workaround is to disable E2EE support in your bridge setup. diff --git a/matrix-authentication-service/docs/config.schema.json b/matrix-authentication-service/docs/config.schema.json new file mode 100644 index 00000000..f6d947e4 --- /dev/null +++ b/matrix-authentication-service/docs/config.schema.json @@ -0,0 +1,2917 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "RootConfig", + "description": "Application configuration root", + "type": "object", + "properties": { + "clients": { + "description": "List of OAuth 2.0/OIDC clients config", + "type": "array", + "items": { + "$ref": "#/definitions/ClientConfig" + } + }, + "http": { + "description": "Configuration of the HTTP server", + "default": { + "listeners": [ + { + "name": "web", + "resources": [ + { + "name": "discovery" + }, + { + "name": "human" + }, + { + "name": "oauth" + }, + { + "name": "compat" + }, + { + "name": "graphql" + }, + { + "name": "assets" + } + ], + "binds": [ + { + "address": "[::]:8080" + } + ], + "proxy_protocol": false + }, + { + "name": "internal", + "resources": [ + { + "name": "health" + } + ], + "binds": [ + { + "host": "localhost", + "port": 8081 + } + ], + "proxy_protocol": false + } + ], + "trusted_proxies": [ + "192.168.0.0/16", + "172.16.0.0/12", + "10.0.0.0/10", + "127.0.0.1/8", + "fd00::/8", + "::1/128" + ], + "public_base": "http://[::]:8080/", + "issuer": "http://[::]:8080/" + }, + "allOf": [ + { + "$ref": "#/definitions/HttpConfig" + } + ] + }, + "database": { + "description": "Database connection configuration", + "default": { + "uri": "postgresql://", + "max_connections": 10, + "min_connections": 0, + "connect_timeout": 30, + "idle_timeout": 600, + "max_lifetime": 1800 + }, + "allOf": [ + { + "$ref": 
"#/definitions/DatabaseConfig" + } + ] + }, + "telemetry": { + "description": "Configuration related to sending monitoring data", + "allOf": [ + { + "$ref": "#/definitions/TelemetryConfig" + } + ] + }, + "templates": { + "description": "Configuration related to templates", + "allOf": [ + { + "$ref": "#/definitions/TemplatesConfig" + } + ] + }, + "email": { + "description": "Configuration related to sending emails", + "default": { + "from": "\"Authentication Service\" ", + "reply_to": "\"Authentication Service\" ", + "transport": "blackhole" + }, + "allOf": [ + { + "$ref": "#/definitions/EmailConfig" + } + ] + }, + "secrets": { + "description": "Application secrets", + "allOf": [ + { + "$ref": "#/definitions/SecretsConfig" + } + ] + }, + "passwords": { + "description": "Configuration related to user passwords", + "default": { + "enabled": true, + "schemes": [ + { + "version": 1, + "algorithm": "argon2id" + } + ], + "minimum_complexity": 3 + }, + "allOf": [ + { + "$ref": "#/definitions/PasswordsConfig" + } + ] + }, + "matrix": { + "description": "Configuration related to the homeserver", + "allOf": [ + { + "$ref": "#/definitions/MatrixConfig" + } + ] + }, + "policy": { + "description": "Configuration related to the OPA policies", + "allOf": [ + { + "$ref": "#/definitions/PolicyConfig" + } + ] + }, + "rate_limiting": { + "description": "Configuration related to limiting the rate of user actions to prevent\n abuse", + "allOf": [ + { + "$ref": "#/definitions/RateLimitingConfig" + } + ] + }, + "upstream_oauth2": { + "description": "Configuration related to upstream OAuth providers", + "allOf": [ + { + "$ref": "#/definitions/UpstreamOAuth2Config" + } + ] + }, + "branding": { + "description": "Configuration section for tweaking the branding of the service", + "allOf": [ + { + "$ref": "#/definitions/BrandingConfig" + } + ] + }, + "captcha": { + "description": "Configuration section to setup CAPTCHA protection on a few operations", + "allOf": [ + { + "$ref": 
"#/definitions/CaptchaConfig" + } + ] + }, + "account": { + "description": "Configuration section to configure features related to account\n management", + "allOf": [ + { + "$ref": "#/definitions/AccountConfig" + } + ] + }, + "experimental": { + "description": "Experimental configuration options", + "allOf": [ + { + "$ref": "#/definitions/ExperimentalConfig" + } + ] + } + }, + "required": [ + "secrets", + "matrix" + ], + "definitions": { + "ClientConfig": { + "description": "An OAuth 2.0 client configuration", + "type": "object", + "properties": { + "client_id": { + "description": "A ULID as per https://github.com/ulid/spec", + "type": "string", + "pattern": "^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$" + }, + "client_auth_method": { + "description": "Authentication method used for this client", + "allOf": [ + { + "$ref": "#/definitions/ClientAuthMethodConfig" + } + ] + }, + "client_name": { + "description": "Name of the `OAuth2` client", + "type": [ + "string", + "null" + ] + }, + "client_secret_file": { + "description": "Path to the file containing the client secret. The client secret is used\n by the `client_secret_basic`, `client_secret_post` and\n `client_secret_jwt` authentication methods.", + "type": [ + "string", + "null" + ] + }, + "client_secret": { + "description": "Alternative to `client_secret_file`: Reads the client secret directly\n from the config.", + "type": [ + "string", + "null" + ] + }, + "jwks": { + "description": "The JSON Web Key Set (JWKS) used by the `private_key_jwt` authentication\n method. Mutually exclusive with `jwks_uri`", + "anyOf": [ + { + "$ref": "#/definitions/JsonWebKeySet_for_JsonWebKeyPublicParameters" + }, + { + "type": "null" + } + ] + }, + "jwks_uri": { + "description": "The URL of the JSON Web Key Set (JWKS) used by the `private_key_jwt`\n authentication method. 
Mutually exclusive with `jwks`", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "redirect_uris": { + "description": "List of allowed redirect URIs", + "type": "array", + "items": { + "type": "string", + "format": "uri" + } + } + }, + "required": [ + "client_id", + "client_auth_method" + ] + }, + "ClientAuthMethodConfig": { + "description": "Authentication method used by clients", + "oneOf": [ + { + "description": "`none`: No authentication", + "type": "string", + "const": "none" + }, + { + "description": "`client_secret_basic`: `client_id` and `client_secret` used as basic\n authorization credentials", + "type": "string", + "const": "client_secret_basic" + }, + { + "description": "`client_secret_post`: `client_id` and `client_secret` sent in the\n request body", + "type": "string", + "const": "client_secret_post" + }, + { + "description": "`client_secret_basic`: a `client_assertion` sent in the request body and\n signed using the `client_secret`", + "type": "string", + "const": "client_secret_jwt" + }, + { + "description": "`client_secret_basic`: a `client_assertion` sent in the request body and\n signed by an asymmetric key", + "type": "string", + "const": "private_key_jwt" + } + ] + }, + "JsonWebKeySet_for_JsonWebKeyPublicParameters": { + "type": "object", + "properties": { + "keys": { + "type": "array", + "items": { + "$ref": "#/definitions/JsonWebKey_for_JsonWebKeyPublicParameters" + } + } + }, + "required": [ + "keys" + ] + }, + "JsonWebKey_for_JsonWebKeyPublicParameters": { + "type": "object", + "properties": { + "use": { + "anyOf": [ + { + "$ref": "#/definitions/JsonWebKeyUse" + }, + { + "type": "null" + } + ] + }, + "key_ops": { + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/JsonWebKeyOperation" + } + }, + "alg": { + "anyOf": [ + { + "$ref": "#/definitions/JsonWebSignatureAlg" + }, + { + "type": "null" + } + ] + }, + "kid": { + "type": [ + "string", + "null" + ] + }, + "x5u": { + "type": [ + "string", + "null" + ] 
+ }, + "x5c": { + "type": "array", + "items": { + "type": "string" + } + }, + "x5t": { + "type": [ + "string", + "null" + ] + }, + "x5t#S256": { + "type": [ + "string", + "null" + ] + } + }, + "oneOf": [ + { + "type": "object", + "properties": { + "kty": { + "type": "string", + "const": "RSA" + } + }, + "required": [ + "kty" + ], + "allOf": [ + { + "$ref": "#/definitions/RsaPublicParameters" + } + ] + }, + { + "type": "object", + "properties": { + "kty": { + "type": "string", + "const": "EC" + } + }, + "required": [ + "kty" + ], + "allOf": [ + { + "$ref": "#/definitions/EcPublicParameters" + } + ] + }, + { + "type": "object", + "properties": { + "kty": { + "type": "string", + "const": "OKP" + } + }, + "required": [ + "kty" + ], + "allOf": [ + { + "$ref": "#/definitions/OkpPublicParameters" + } + ] + } + ] + }, + "RsaPublicParameters": { + "type": "object", + "properties": { + "n": { + "type": "string" + }, + "e": { + "type": "string" + } + }, + "required": [ + "n", + "e" + ] + }, + "JsonWebKeyEcEllipticCurve": { + "description": "JSON Web Key EC Elliptic Curve", + "anyOf": [ + { + "description": "P-256 Curve", + "const": "P-256" + }, + { + "description": "P-384 Curve", + "const": "P-384" + }, + { + "description": "P-521 Curve", + "const": "P-521" + }, + { + "description": "SECG secp256k1 curve", + "const": "secp256k1" + } + ] + }, + "EcPublicParameters": { + "type": "object", + "properties": { + "crv": { + "$ref": "#/definitions/JsonWebKeyEcEllipticCurve" + }, + "x": { + "type": "string" + }, + "y": { + "type": "string" + } + }, + "required": [ + "crv", + "x", + "y" + ] + }, + "JsonWebKeyOkpEllipticCurve": { + "description": "JSON Web Key OKP Elliptic Curve", + "anyOf": [ + { + "description": "Ed25519 signature algorithm key pairs", + "const": "Ed25519" + }, + { + "description": "Ed448 signature algorithm key pairs", + "const": "Ed448" + }, + { + "description": "X25519 function key pairs", + "const": "X25519" + }, + { + "description": "X448 function key pairs", + 
"const": "X448" + } + ] + }, + "OkpPublicParameters": { + "type": "object", + "properties": { + "crv": { + "$ref": "#/definitions/JsonWebKeyOkpEllipticCurve" + }, + "x": { + "type": "string" + } + }, + "required": [ + "crv", + "x" + ] + }, + "JsonWebKeyUse": { + "description": "JSON Web Key Use", + "anyOf": [ + { + "description": "Digital Signature or MAC", + "const": "sig" + }, + { + "description": "Encryption", + "const": "enc" + } + ] + }, + "JsonWebKeyOperation": { + "description": "JSON Web Key Operation", + "anyOf": [ + { + "description": "Compute digital signature or MAC", + "const": "sign" + }, + { + "description": "Verify digital signature or MAC", + "const": "verify" + }, + { + "description": "Encrypt content", + "const": "encrypt" + }, + { + "description": "Decrypt content and validate decryption, if applicable", + "const": "decrypt" + }, + { + "description": "Encrypt key", + "const": "wrapKey" + }, + { + "description": "Decrypt key and validate decryption, if applicable", + "const": "unwrapKey" + }, + { + "description": "Derive key", + "const": "deriveKey" + }, + { + "description": "Derive bits not to be used as a key", + "const": "deriveBits" + } + ] + }, + "JsonWebSignatureAlg": { + "description": "JSON Web Signature \"alg\" parameter", + "anyOf": [ + { + "description": "HMAC using SHA-256", + "const": "HS256" + }, + { + "description": "HMAC using SHA-384", + "const": "HS384" + }, + { + "description": "HMAC using SHA-512", + "const": "HS512" + }, + { + "description": "RSASSA-PKCS1-v1_5 using SHA-256", + "const": "RS256" + }, + { + "description": "RSASSA-PKCS1-v1_5 using SHA-384", + "const": "RS384" + }, + { + "description": "RSASSA-PKCS1-v1_5 using SHA-512", + "const": "RS512" + }, + { + "description": "ECDSA using P-256 and SHA-256", + "const": "ES256" + }, + { + "description": "ECDSA using P-384 and SHA-384", + "const": "ES384" + }, + { + "description": "ECDSA using P-521 and SHA-512", + "const": "ES512" + }, + { + "description": "RSASSA-PSS using 
SHA-256 and MGF1 with SHA-256", + "const": "PS256" + }, + { + "description": "RSASSA-PSS using SHA-384 and MGF1 with SHA-384", + "const": "PS384" + }, + { + "description": "RSASSA-PSS using SHA-512 and MGF1 with SHA-512", + "const": "PS512" + }, + { + "description": "No digital signature or MAC performed", + "const": "none" + }, + { + "description": "EdDSA signature algorithms", + "const": "EdDSA" + }, + { + "description": "ECDSA using secp256k1 curve and SHA-256", + "const": "ES256K" + }, + { + "description": "EdDSA using Ed25519 curve", + "const": "Ed25519" + }, + { + "description": "EdDSA using Ed448 curve", + "const": "Ed448" + } + ] + }, + "HttpConfig": { + "description": "Configuration related to the web server", + "type": "object", + "properties": { + "listeners": { + "description": "List of listeners to run", + "type": "array", + "items": { + "$ref": "#/definitions/ListenerConfig" + }, + "default": [] + }, + "trusted_proxies": { + "description": "List of trusted reverse proxies that can set the `X-Forwarded-For`\n header", + "type": "array", + "items": { + "type": "string", + "format": "ip" + }, + "default": [ + "192.168.0.0/16", + "172.16.0.0/12", + "10.0.0.0/10", + "127.0.0.1/8", + "fd00::/8", + "::1/128" + ] + }, + "public_base": { + "description": "Public URL base from where the authentication service is reachable", + "type": "string", + "format": "uri" + }, + "issuer": { + "description": "OIDC issuer URL. 
Defaults to `public_base` if not set.", + "type": [ + "string", + "null" + ], + "format": "uri" + } + }, + "required": [ + "public_base" + ] + }, + "ListenerConfig": { + "description": "Configuration of a listener", + "type": "object", + "properties": { + "name": { + "description": "A unique name for this listener which will be shown in traces and in\n metrics labels", + "type": [ + "string", + "null" + ] + }, + "resources": { + "description": "List of resources to mount", + "type": "array", + "items": { + "$ref": "#/definitions/Resource" + } + }, + "prefix": { + "description": "HTTP prefix to mount the resources on", + "type": [ + "string", + "null" + ] + }, + "binds": { + "description": "List of sockets to bind", + "type": "array", + "items": { + "$ref": "#/definitions/BindConfig" + } + }, + "proxy_protocol": { + "description": "Accept `HAProxy`'s Proxy Protocol V1", + "type": "boolean", + "default": false + }, + "tls": { + "description": "If set, makes the listener use TLS with the provided certificate and key", + "anyOf": [ + { + "$ref": "#/definitions/TlsConfig" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "resources", + "binds" + ] + }, + "Resource": { + "description": "HTTP resources to mount", + "oneOf": [ + { + "description": "Healthcheck endpoint (/health)", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "health" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Prometheus metrics endpoint (/metrics)", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "prometheus" + } + }, + "required": [ + "name" + ] + }, + { + "description": "OIDC discovery endpoints", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "discovery" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Pages destined to be viewed by humans", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "human" + } + }, + "required": [ 
+ "name" + ] + }, + { + "description": "GraphQL endpoint", + "type": "object", + "properties": { + "playground": { + "description": "Enabled the GraphQL playground", + "type": "boolean" + }, + "undocumented_oauth2_access": { + "description": "Allow access for OAuth 2.0 clients (undocumented)", + "type": "boolean" + }, + "name": { + "type": "string", + "const": "graphql" + } + }, + "required": [ + "name" + ] + }, + { + "description": "OAuth-related APIs", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "oauth" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Matrix compatibility API", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "compat" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Static files", + "type": "object", + "properties": { + "path": { + "description": "Path to the directory to serve.", + "type": "string" + }, + "name": { + "type": "string", + "const": "assets" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Admin API, served at `/api/admin/v1`", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "adminapi" + } + }, + "required": [ + "name" + ] + }, + { + "description": "Mount a \"/connection-info\" handler which helps debugging informations on\n the upstream connection", + "type": "object", + "properties": { + "name": { + "type": "string", + "const": "connection-info" + } + }, + "required": [ + "name" + ] + } + ] + }, + "BindConfig": { + "description": "Configuration of a single listener", + "anyOf": [ + { + "description": "Listen on the specified host and port", + "type": "object", + "properties": { + "host": { + "description": "Host on which to listen.\n\n Defaults to listening on all addresses", + "type": [ + "string", + "null" + ] + }, + "port": { + "description": "Port on which to listen.", + "type": "integer", + "format": "uint16", + "minimum": 0, + "maximum": 65535 + } + }, + "required": [ + "port" 
+ ] + }, + { + "description": "Listen on the specified address", + "type": "object", + "properties": { + "address": { + "description": "Host and port on which to listen", + "type": "string", + "examples": [ + "[::1]:8080", + "[::]:8080", + "127.0.0.1:8080", + "0.0.0.0:8080" + ] + } + }, + "required": [ + "address" + ] + }, + { + "description": "Listen on a UNIX domain socket", + "type": "object", + "properties": { + "socket": { + "description": "Path to the socket", + "type": "string" + } + }, + "required": [ + "socket" + ] + }, + { + "description": "Accept connections on file descriptors passed by the parent process.\n\n This is useful for grabbing sockets passed by systemd.\n\n See ", + "type": "object", + "properties": { + "fd": { + "description": "Index of the file descriptor. Note that this is offseted by 3\n because of the standard input/output sockets, so setting\n here a value of `0` will grab the file descriptor `3`", + "type": "integer", + "format": "uint", + "minimum": 0, + "default": 0 + }, + "kind": { + "description": "Whether the socket is a TCP socket or a UNIX domain socket. 
Defaults\n to TCP.", + "default": "tcp", + "allOf": [ + { + "$ref": "#/definitions/UnixOrTcp" + } + ] + } + } + } + ] + }, + "UnixOrTcp": { + "description": "Kind of socket", + "oneOf": [ + { + "description": "UNIX domain socket", + "type": "string", + "const": "unix" + }, + { + "description": "TCP socket", + "type": "string", + "const": "tcp" + } + ] + }, + "TlsConfig": { + "description": "Configuration related to TLS on a listener", + "type": "object", + "properties": { + "certificate": { + "description": "PEM-encoded X509 certificate chain\n\n Exactly one of `certificate` or `certificate_file` must be set.", + "type": [ + "string", + "null" + ] + }, + "certificate_file": { + "description": "File containing the PEM-encoded X509 certificate chain\n\n Exactly one of `certificate` or `certificate_file` must be set.", + "type": [ + "string", + "null" + ] + }, + "key": { + "description": "PEM-encoded private key\n\n Exactly one of `key` or `key_file` must be set.", + "type": [ + "string", + "null" + ] + }, + "key_file": { + "description": "File containing a PEM or DER-encoded private key\n\n Exactly one of `key` or `key_file` must be set.", + "type": [ + "string", + "null" + ] + }, + "password": { + "description": "Password used to decode the private key\n\n One of `password` or `password_file` must be set if the key is\n encrypted.", + "type": [ + "string", + "null" + ] + }, + "password_file": { + "description": "Password file used to decode the private key\n\n One of `password` or `password_file` must be set if the key is\n encrypted.", + "type": [ + "string", + "null" + ] + } + } + }, + "DatabaseConfig": { + "description": "Database connection configuration", + "type": "object", + "properties": { + "uri": { + "description": "Connection URI\n\n This must not be specified if `host`, `port`, `socket`, `username`,\n `password`, or `database` are specified.", + "type": [ + "string", + "null" + ], + "format": "uri", + "default": "postgresql://" + }, + "host": { + 
"description": "Name of host to connect to\n\n This must not be specified if `uri` is specified.", + "anyOf": [ + { + "$ref": "#/definitions/Hostname" + }, + { + "type": "null" + } + ] + }, + "port": { + "description": "Port number to connect at the server host\n\n This must not be specified if `uri` is specified.", + "type": [ + "integer", + "null" + ], + "format": "uint16", + "minimum": 1, + "maximum": 65535 + }, + "socket": { + "description": "Directory containing the UNIX socket to connect to\n\n This must not be specified if `uri` is specified.", + "type": [ + "string", + "null" + ] + }, + "username": { + "description": "PostgreSQL user name to connect as\n\n This must not be specified if `uri` is specified.", + "type": [ + "string", + "null" + ] + }, + "password": { + "description": "Password to be used if the server demands password authentication\n\n This must not be specified if `uri` is specified.", + "type": [ + "string", + "null" + ] + }, + "database": { + "description": "The database name\n\n This must not be specified if `uri` is specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_mode": { + "description": "How to handle SSL connections", + "anyOf": [ + { + "$ref": "#/definitions/PgSslMode" + }, + { + "type": "null" + } + ] + }, + "ssl_ca": { + "description": "The PEM-encoded root certificate for SSL connections\n\n This must not be specified if the `ssl_ca_file` option is specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_ca_file": { + "description": "Path to the root certificate for SSL connections\n\n This must not be specified if the `ssl_ca` option is specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_certificate": { + "description": "The PEM-encoded client certificate for SSL connections\n\n This must not be specified if the `ssl_certificate_file` option is\n specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_certificate_file": { + "description": "Path to the client certificate for SSL connections\n\n 
This must not be specified if the `ssl_certificate` option is specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_key": { + "description": "The PEM-encoded client key for SSL connections\n\n This must not be specified if the `ssl_key_file` option is specified.", + "type": [ + "string", + "null" + ] + }, + "ssl_key_file": { + "description": "Path to the client key for SSL connections\n\n This must not be specified if the `ssl_key` option is specified.", + "type": [ + "string", + "null" + ] + }, + "max_connections": { + "description": "Set the maximum number of connections the pool should maintain", + "type": "integer", + "format": "uint32", + "minimum": 1, + "default": 10 + }, + "min_connections": { + "description": "Set the minimum number of connections the pool should maintain", + "type": "integer", + "format": "uint32", + "minimum": 0, + "default": 0 + }, + "connect_timeout": { + "description": "Set the amount of time to attempt connecting to the database", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 30 + }, + "idle_timeout": { + "description": "Set a maximum idle duration for individual connections", + "type": [ + "integer", + "null" + ], + "format": "uint64", + "minimum": 0, + "default": 600 + }, + "max_lifetime": { + "description": "Set the maximum lifetime of individual connections", + "type": "integer", + "format": "uint64", + "minimum": 0, + "default": 1800 + } + } + }, + "Hostname": { + "type": "string", + "format": "hostname" + }, + "PgSslMode": { + "description": "Options for controlling the level of protection provided for PostgreSQL SSL\n connections.", + "oneOf": [ + { + "description": "Only try a non-SSL connection.", + "type": "string", + "const": "disable" + }, + { + "description": "First try a non-SSL connection; if that fails, try an SSL connection.", + "type": "string", + "const": "allow" + }, + { + "description": "First try an SSL connection; if that fails, try a non-SSL connection.", + "type": "string", + 
"const": "prefer" + }, + { + "description": "Only try an SSL connection. If a root CA file is present, verify the\n connection in the same way as if `VerifyCa` was specified.", + "type": "string", + "const": "require" + }, + { + "description": "Only try an SSL connection, and verify that the server certificate is\n issued by a trusted certificate authority (CA).", + "type": "string", + "const": "verify-ca" + }, + { + "description": "Only try an SSL connection; verify that the server certificate is issued\n by a trusted CA and that the requested server host name matches that\n in the certificate.", + "type": "string", + "const": "verify-full" + } + ] + }, + "TelemetryConfig": { + "description": "Configuration related to sending monitoring data", + "type": "object", + "properties": { + "tracing": { + "description": "Configuration related to exporting traces", + "allOf": [ + { + "$ref": "#/definitions/TracingConfig" + } + ] + }, + "metrics": { + "description": "Configuration related to exporting metrics", + "allOf": [ + { + "$ref": "#/definitions/MetricsConfig" + } + ] + }, + "sentry": { + "description": "Configuration related to the Sentry integration", + "allOf": [ + { + "$ref": "#/definitions/SentryConfig" + } + ] + } + } + }, + "TracingConfig": { + "description": "Configuration related to exporting traces", + "type": "object", + "properties": { + "exporter": { + "description": "Exporter to use when exporting traces", + "default": "none", + "allOf": [ + { + "$ref": "#/definitions/TracingExporterKind" + } + ] + }, + "endpoint": { + "description": "OTLP exporter: OTLP over HTTP compatible endpoint", + "type": [ + "string", + "null" + ], + "format": "uri", + "default": "https://localhost:4318" + }, + "propagators": { + "description": "List of propagation formats to use for incoming and outgoing requests", + "type": "array", + "items": { + "$ref": "#/definitions/Propagator" + }, + "default": [] + }, + "sample_rate": { + "description": "Sample rate for traces\n\n 
Defaults to `1.0` if not set.", + "type": [ + "number", + "null" + ], + "format": "double", + "examples": [ + 0.5 + ], + "minimum": 0.0, + "maximum": 1.0 + } + } + }, + "TracingExporterKind": { + "description": "Exporter to use when exporting traces", + "oneOf": [ + { + "description": "Don't export traces", + "type": "string", + "const": "none" + }, + { + "description": "Export traces to the standard output. Only useful for debugging", + "type": "string", + "const": "stdout" + }, + { + "description": "Export traces to an OpenTelemetry protocol compatible endpoint", + "type": "string", + "const": "otlp" + } + ] + }, + "Propagator": { + "description": "Propagation format for incoming and outgoing requests", + "oneOf": [ + { + "description": "Propagate according to the W3C Trace Context specification", + "type": "string", + "const": "tracecontext" + }, + { + "description": "Propagate according to the W3C Baggage specification", + "type": "string", + "const": "baggage" + }, + { + "description": "Propagate trace context with Jaeger compatible headers", + "type": "string", + "const": "jaeger" + } + ] + }, + "MetricsConfig": { + "description": "Configuration related to exporting metrics", + "type": "object", + "properties": { + "exporter": { + "description": "Exporter to use when exporting metrics", + "default": "none", + "allOf": [ + { + "$ref": "#/definitions/MetricsExporterKind" + } + ] + }, + "endpoint": { + "description": "OTLP exporter: OTLP over HTTP compatible endpoint", + "type": [ + "string", + "null" + ], + "format": "uri", + "default": "https://localhost:4318" + } + } + }, + "MetricsExporterKind": { + "description": "Exporter to use when exporting metrics", + "oneOf": [ + { + "description": "Don't export metrics", + "type": "string", + "const": "none" + }, + { + "description": "Export metrics to stdout. 
Only useful for debugging", + "type": "string", + "const": "stdout" + }, + { + "description": "Export metrics to an OpenTelemetry protocol compatible endpoint", + "type": "string", + "const": "otlp" + }, + { + "description": "Export metrics via Prometheus. An HTTP listener with the `prometheus`\n resource must be setup to expose the Promethes metrics.", + "type": "string", + "const": "prometheus" + } + ] + }, + "SentryConfig": { + "description": "Configuration related to the Sentry integration", + "type": "object", + "properties": { + "dsn": { + "description": "Sentry DSN", + "type": [ + "string", + "null" + ], + "format": "uri", + "examples": [ + "https://public@host:port/1" + ] + }, + "environment": { + "description": "Environment to use when sending events to Sentry\n\n Defaults to `production` if not set.", + "type": [ + "string", + "null" + ], + "examples": [ + "production" + ] + }, + "sample_rate": { + "description": "Sample rate for event submissions\n\n Defaults to `1.0` if not set.", + "type": [ + "number", + "null" + ], + "format": "float", + "examples": [ + 0.5 + ], + "minimum": 0.0, + "maximum": 1.0 + }, + "traces_sample_rate": { + "description": "Sample rate for tracing transactions\n\n Defaults to `0.0` if not set.", + "type": [ + "number", + "null" + ], + "format": "float", + "examples": [ + 0.5 + ], + "minimum": 0.0, + "maximum": 1.0 + } + } + }, + "TemplatesConfig": { + "description": "Configuration related to templates", + "type": "object", + "properties": { + "path": { + "description": "Path to the folder which holds the templates", + "type": [ + "string", + "null" + ] + }, + "assets_manifest": { + "description": "Path to the assets manifest", + "type": [ + "string", + "null" + ] + }, + "translations_path": { + "description": "Path to the translations", + "type": [ + "string", + "null" + ] + } + } + }, + "EmailConfig": { + "description": "Configuration related to sending emails", + "type": "object", + "properties": { + "from": { + "description": 
"Email address to use as From when sending emails", + "type": "string", + "format": "email", + "default": "\"Authentication Service\" " + }, + "reply_to": { + "description": "Email address to use as Reply-To when sending emails", + "type": "string", + "format": "email", + "default": "\"Authentication Service\" " + }, + "transport": { + "description": "What backend should be used when sending emails", + "allOf": [ + { + "$ref": "#/definitions/EmailTransportKind" + } + ] + }, + "mode": { + "description": "SMTP transport: Connection mode to the relay", + "anyOf": [ + { + "$ref": "#/definitions/EmailSmtpMode" + }, + { + "type": "null" + } + ] + }, + "hostname": { + "description": "SMTP transport: Hostname to connect to", + "anyOf": [ + { + "$ref": "#/definitions/Hostname" + }, + { + "type": "null" + } + ] + }, + "port": { + "description": "SMTP transport: Port to connect to. Default is 25 for plain, 465 for TLS\n and 587 for `StartTLS`", + "type": [ + "integer", + "null" + ], + "format": "uint16", + "minimum": 1, + "maximum": 65535 + }, + "username": { + "description": "SMTP transport: Username for use to authenticate when connecting to the\n SMTP server\n\n Must be set if the `password` field is set", + "type": [ + "string", + "null" + ] + }, + "password": { + "description": "SMTP transport: Password for use to authenticate when connecting to the\n SMTP server\n\n Must be set if the `username` field is set", + "type": [ + "string", + "null" + ] + }, + "command": { + "description": "Sendmail transport: Command to use to send emails", + "type": [ + "string", + "null" + ], + "default": "sendmail" + } + }, + "required": [ + "transport" + ] + }, + "EmailTransportKind": { + "description": "What backend should be used when sending emails", + "oneOf": [ + { + "description": "Don't send emails anywhere", + "type": "string", + "const": "blackhole" + }, + { + "description": "Send emails via an SMTP relay", + "type": "string", + "const": "smtp" + }, + { + "description": "Send 
emails by calling sendmail", + "type": "string", + "const": "sendmail" + } + ] + }, + "EmailSmtpMode": { + "description": "Encryption mode to use", + "oneOf": [ + { + "description": "Plain text", + "type": "string", + "const": "plain" + }, + { + "description": "`StartTLS` (starts as plain text then upgrade to TLS)", + "type": "string", + "const": "starttls" + }, + { + "description": "TLS", + "type": "string", + "const": "tls" + } + ] + }, + "SecretsConfig": { + "description": "Application secrets", + "type": "object", + "properties": { + "encryption_file": { + "description": "File containing the encryption key for secure cookies.", + "type": [ + "string", + "null" + ] + }, + "encryption": { + "description": "Encryption key for secure cookies.", + "type": [ + "string", + "null" + ], + "examples": [ + "0000111122223333444455556666777788889999aaaabbbbccccddddeeeeffff" + ], + "pattern": "[0-9a-fA-F]{64}" + }, + "keys": { + "description": "List of private keys to use for signing and encrypting payloads.", + "type": [ + "array", + "null" + ], + "items": { + "$ref": "#/definitions/KeyConfig" + } + }, + "keys_dir": { + "description": "Directory of private keys to use for signing and encrypting payloads.", + "type": [ + "string", + "null" + ] + } + } + }, + "KeyConfig": { + "description": "A single key with its key ID and optional password.", + "type": "object", + "properties": { + "kid": { + "description": "The key ID `kid` of the key as used by JWKs.\n\n If not given, `kid` will be the key’s RFC 7638 JWK Thumbprint.", + "type": [ + "string", + "null" + ] + }, + "password_file": { + "type": [ + "string", + "null" + ] + }, + "password": { + "type": [ + "string", + "null" + ] + }, + "key_file": { + "type": [ + "string", + "null" + ] + }, + "key": { + "type": [ + "string", + "null" + ] + } + } + }, + "PasswordsConfig": { + "description": "User password hashing config", + "type": "object", + "properties": { + "enabled": { + "description": "Whether password-based authentication 
is enabled", + "type": "boolean", + "default": true + }, + "schemes": { + "description": "The hashing schemes to use for hashing and validating passwords\n\n The hashing scheme with the highest version number will be used for\n hashing new passwords.", + "type": "array", + "items": { + "$ref": "#/definitions/HashingScheme" + }, + "default": [ + { + "version": 1, + "algorithm": "argon2id" + } + ] + }, + "minimum_complexity": { + "description": "Score between 0 and 4 determining the minimum allowed password\n complexity. Scores are based on the ESTIMATED number of guesses\n needed to guess the password.\n\n - 0: less than 10^2 (100)\n - 1: less than 10^4 (10'000)\n - 2: less than 10^6 (1'000'000)\n - 3: less than 10^8 (100'000'000)\n - 4: any more than that", + "type": "integer", + "format": "uint8", + "minimum": 0, + "maximum": 255, + "default": 3 + } + } + }, + "HashingScheme": { + "description": "Parameters for a password hashing scheme", + "type": "object", + "properties": { + "version": { + "description": "The version of the hashing scheme. They must be unique, and the highest\n version will be used for hashing new passwords.", + "type": "integer", + "format": "uint16", + "minimum": 0, + "maximum": 65535 + }, + "algorithm": { + "description": "The hashing algorithm to use", + "allOf": [ + { + "$ref": "#/definitions/Algorithm" + } + ] + }, + "unicode_normalization": { + "description": "Whether to apply Unicode normalization to the password before hashing\n\n Defaults to `false`, and generally recommended to stay false. This is\n although recommended when importing password hashs from Synapse, as it\n applies an NFKC normalization to the password before hashing it.", + "type": "boolean" + }, + "cost": { + "description": "Cost for the bcrypt algorithm", + "type": [ + "integer", + "null" + ], + "format": "uint32", + "minimum": 0, + "default": 12 + }, + "secret": { + "description": "An optional secret to use when hashing passwords. 
This makes it harder\n to brute-force the passwords in case of a database leak.", + "type": [ + "string", + "null" + ] + }, + "secret_file": { + "description": "Same as `secret`, but read from a file.", + "type": [ + "string", + "null" + ] + } + }, + "required": [ + "version", + "algorithm" + ] + }, + "Algorithm": { + "description": "A hashing algorithm", + "oneOf": [ + { + "description": "bcrypt", + "type": "string", + "const": "bcrypt" + }, + { + "description": "argon2id", + "type": "string", + "const": "argon2id" + }, + { + "description": "PBKDF2", + "type": "string", + "const": "pbkdf2" + } + ] + }, + "MatrixConfig": { + "description": "Configuration related to the Matrix homeserver", + "type": "object", + "properties": { + "kind": { + "description": "The kind of homeserver it is.", + "default": "synapse", + "allOf": [ + { + "$ref": "#/definitions/HomeserverKind" + } + ] + }, + "homeserver": { + "description": "The server name of the homeserver.", + "type": "string", + "default": "localhost:8008" + }, + "secret_file": { + "type": [ + "string", + "null" + ] + }, + "secret": { + "type": [ + "string", + "null" + ] + }, + "endpoint": { + "description": "The base URL of the homeserver's client API", + "type": "string", + "format": "uri", + "default": "http://localhost:8008/" + } + } + }, + "HomeserverKind": { + "description": "The kind of homeserver it is.", + "oneOf": [ + { + "description": "Homeserver is Synapse, version 1.135.0 or newer", + "type": "string", + "const": "synapse" + }, + { + "description": "Homeserver is Synapse, version 1.135.0 or newer, in read-only mode\n\n This is meant for testing rolling out Matrix Authentication Service with\n no risk of writing data to the homeserver.", + "type": "string", + "const": "synapse_read_only" + }, + { + "description": "Homeserver is Synapse, using the legacy API", + "type": "string", + "const": "synapse_legacy" + }, + { + "description": "Homeserver is Synapse, with the modern API available (>= 1.135.0)", + 
"type": "string", + "const": "synapse_modern" + } + ] + }, + "PolicyConfig": { + "description": "Application secrets", + "type": "object", + "properties": { + "wasm_module": { + "description": "Path to the WASM module", + "type": "string" + }, + "client_registration_entrypoint": { + "description": "Entrypoint to use when evaluating client registrations", + "type": "string" + }, + "register_entrypoint": { + "description": "Entrypoint to use when evaluating user registrations", + "type": "string" + }, + "authorization_grant_entrypoint": { + "description": "Entrypoint to use when evaluating authorization grants", + "type": "string" + }, + "compat_login_entrypoint": { + "description": "Entrypoint to use when evaluating compatibility logins", + "type": "string" + }, + "password_entrypoint": { + "description": "Entrypoint to use when changing password", + "type": "string" + }, + "email_entrypoint": { + "description": "Entrypoint to use when adding an email address", + "type": "string" + }, + "data": { + "description": "Arbitrary data to pass to the policy" + } + } + }, + "RateLimitingConfig": { + "description": "Configuration related to sending emails", + "type": "object", + "properties": { + "account_recovery": { + "description": "Account Recovery-specific rate limits", + "default": { + "per_ip": { + "burst": 3, + "per_second": 0.0008333333333333334 + }, + "per_address": { + "burst": 3, + "per_second": 0.0002777777777777778 + } + }, + "allOf": [ + { + "$ref": "#/definitions/AccountRecoveryRateLimitingConfig" + } + ] + }, + "login": { + "description": "Login-specific rate limits", + "default": { + "per_ip": { + "burst": 3, + "per_second": 0.05 + }, + "per_account": { + "burst": 1800, + "per_second": 0.5 + } + }, + "allOf": [ + { + "$ref": "#/definitions/LoginRateLimitingConfig" + } + ] + }, + "registration": { + "description": "Controls how many registrations attempts are permitted\n based on source address.", + "default": { + "burst": 3, + "per_second": 
0.0008333333333333334 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "email_authentication": { + "description": "Email authentication-specific rate limits", + "default": { + "per_ip": { + "burst": 5, + "per_second": 0.016666666666666666 + }, + "per_address": { + "burst": 3, + "per_second": 0.0002777777777777778 + }, + "emails_per_session": { + "burst": 2, + "per_second": 0.0033333333333333335 + }, + "attempt_per_session": { + "burst": 10, + "per_second": 0.016666666666666666 + } + }, + "allOf": [ + { + "$ref": "#/definitions/EmailauthenticationRateLimitingConfig" + } + ] + } + } + }, + "AccountRecoveryRateLimitingConfig": { + "type": "object", + "properties": { + "per_ip": { + "description": "Controls how many account recovery attempts are permitted\n based on source IP address.\n This can protect against causing e-mail spam to many targets.\n\n Note: this limit also applies to re-sends.", + "default": { + "burst": 3, + "per_second": 0.0008333333333333334 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "per_address": { + "description": "Controls how many account recovery attempts are permitted\n based on the e-mail address entered into the recovery form.\n This can protect against causing e-mail spam to one target.\n\n Note: this limit also applies to re-sends.", + "default": { + "burst": 3, + "per_second": 0.0002777777777777778 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + } + } + }, + "RateLimiterConfiguration": { + "type": "object", + "properties": { + "burst": { + "description": "A one-off burst of actions that the user can perform\n in one go without waiting.", + "type": "integer", + "format": "uint32", + "minimum": 1 + }, + "per_second": { + "description": "How quickly the allowance replenishes, in number of actions per second.\n Can be fractional to replenish slower.", + "type": "number", + "format": "double" + } + }, + "required": [ + 
"burst", + "per_second" + ] + }, + "LoginRateLimitingConfig": { + "type": "object", + "properties": { + "per_ip": { + "description": "Controls how many login attempts are permitted\n based on source IP address.\n This can protect against brute force login attempts.\n\n Note: this limit also applies to password checks when a user attempts to\n change their own password.", + "default": { + "burst": 3, + "per_second": 0.05 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "per_account": { + "description": "Controls how many login attempts are permitted\n based on the account that is being attempted to be logged into.\n This can protect against a distributed brute force attack\n but should be set high enough to prevent someone's account being\n casually locked out.\n\n Note: this limit also applies to password checks when a user attempts to\n change their own password.", + "default": { + "burst": 1800, + "per_second": 0.5 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + } + } + }, + "EmailauthenticationRateLimitingConfig": { + "type": "object", + "properties": { + "per_ip": { + "description": "Controls how many email authentication attempts are permitted\n based on the source IP address.\n This can protect against causing e-mail spam to many targets.", + "default": { + "burst": 5, + "per_second": 0.016666666666666666 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "per_address": { + "description": "Controls how many email authentication attempts are permitted\n based on the e-mail address entered into the authentication form.\n This can protect against causing e-mail spam to one target.\n\n Note: this limit also applies to re-sends.", + "default": { + "burst": 3, + "per_second": 0.0002777777777777778 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "emails_per_session": { + "description": "Controls how many 
authentication emails are permitted to be sent per\n authentication session. This ensures not too many authentication codes\n are created for the same authentication session.", + "default": { + "burst": 2, + "per_second": 0.0033333333333333335 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + }, + "attempt_per_session": { + "description": "Controls how many code authentication attempts are permitted per\n authentication session. This can protect against brute-forcing the\n code.", + "default": { + "burst": 10, + "per_second": 0.016666666666666666 + }, + "allOf": [ + { + "$ref": "#/definitions/RateLimiterConfiguration" + } + ] + } + } + }, + "UpstreamOAuth2Config": { + "description": "Upstream OAuth 2.0 providers configuration", + "type": "object", + "properties": { + "providers": { + "description": "List of OAuth 2.0 providers", + "type": "array", + "items": { + "$ref": "#/definitions/Provider" + } + } + }, + "required": [ + "providers" + ] + }, + "Provider": { + "description": "Configuration for one upstream OAuth 2 provider.", + "type": "object", + "properties": { + "enabled": { + "description": "Whether this provider is enabled.\n\n Defaults to `true`", + "type": "boolean" + }, + "id": { + "description": "A ULID as per https://github.com/ulid/spec", + "type": "string", + "pattern": "^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$" + }, + "synapse_idp_id": { + "description": "The ID of the provider that was used by Synapse.\n In order to perform a Synapse-to-MAS migration, this must be specified.\n\n ## For providers that used OAuth 2.0 or OpenID Connect in Synapse\n\n ### For `oidc_providers`:\n This should be specified as `oidc-` followed by the ID that was\n configured as `idp_id` in one of the `oidc_providers` in the Synapse\n configuration.\n For example, if Synapse's configuration contained `idp_id: wombat` for\n this provider, then specify `oidc-wombat` here.\n\n ### For `oidc_config` (legacy):\n Specify `oidc` here.", + "type": [ 
+ "string", + "null" + ] + }, + "issuer": { + "description": "The OIDC issuer URL\n\n This is required if OIDC discovery is enabled (which is the default)", + "type": [ + "string", + "null" + ] + }, + "human_name": { + "description": "A human-readable name for the provider, that will be shown to users", + "type": [ + "string", + "null" + ] + }, + "brand_name": { + "description": "A brand identifier used to customise the UI, e.g. `apple`, `google`,\n `github`, etc.\n\n Values supported by the default template are:\n\n - `apple`\n - `google`\n - `facebook`\n - `github`\n - `gitlab`\n - `twitter`\n - `discord`", + "type": [ + "string", + "null" + ] + }, + "client_id": { + "description": "The client ID to use when authenticating with the provider", + "type": "string" + }, + "client_secret_file": { + "description": "Path to the file containing the client secret. The client secret is used\n by the `client_secret_basic`, `client_secret_post` and\n `client_secret_jwt` authentication methods.", + "type": [ + "string", + "null" + ] + }, + "client_secret": { + "description": "Alternative to `client_secret_file`: Reads the client secret directly\n from the config.", + "type": [ + "string", + "null" + ] + }, + "token_endpoint_auth_method": { + "description": "The method to authenticate the client with the provider", + "allOf": [ + { + "$ref": "#/definitions/TokenAuthMethod" + } + ] + }, + "sign_in_with_apple": { + "description": "Additional parameters for the `sign_in_with_apple` method", + "anyOf": [ + { + "$ref": "#/definitions/SignInWithApple" + }, + { + "type": "null" + } + ] + }, + "token_endpoint_auth_signing_alg": { + "description": "The JWS algorithm to use when authenticating the client with the\n provider\n\n Used by the `client_secret_jwt` and `private_key_jwt` methods", + "anyOf": [ + { + "$ref": "#/definitions/JsonWebSignatureAlg" + }, + { + "type": "null" + } + ] + }, + "id_token_signed_response_alg": { + "description": "Expected signature for the JWT payload 
returned by the token\n authentication endpoint.\n\n Defaults to `RS256`.", + "allOf": [ + { + "$ref": "#/definitions/JsonWebSignatureAlg" + } + ] + }, + "scope": { + "description": "The scopes to request from the provider\n\n Defaults to `openid`.", + "type": "string" + }, + "discovery_mode": { + "description": "How to discover the provider's configuration\n\n Defaults to `oidc`, which uses OIDC discovery with strict metadata\n verification", + "allOf": [ + { + "$ref": "#/definitions/DiscoveryMode" + } + ] + }, + "pkce_method": { + "description": "Whether to use proof key for code exchange (PKCE) when requesting and\n exchanging the token.\n\n Defaults to `auto`, which uses PKCE if the provider supports it.", + "allOf": [ + { + "$ref": "#/definitions/PkceMethod" + } + ] + }, + "fetch_userinfo": { + "description": "Whether to fetch the user profile from the userinfo endpoint,\n or to rely on the data returned in the `id_token` from the\n `token_endpoint`.\n\n Defaults to `false`.", + "type": "boolean", + "default": false + }, + "userinfo_signed_response_alg": { + "description": "Expected signature for the JWT payload returned by the userinfo\n endpoint.\n\n If not specified, the response is expected to be an unsigned JSON\n payload.", + "anyOf": [ + { + "$ref": "#/definitions/JsonWebSignatureAlg" + }, + { + "type": "null" + } + ] + }, + "authorization_endpoint": { + "description": "The URL to use for the provider's authorization endpoint\n\n Defaults to the `authorization_endpoint` provided through discovery", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "userinfo_endpoint": { + "description": "The URL to use for the provider's userinfo endpoint\n\n Defaults to the `userinfo_endpoint` provided through discovery", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "token_endpoint": { + "description": "The URL to use for the provider's token endpoint\n\n Defaults to the `token_endpoint` provided through discovery", + "type": [ + 
"string", + "null" + ], + "format": "uri" + }, + "jwks_uri": { + "description": "The URL to use for getting the provider's public keys\n\n Defaults to the `jwks_uri` provided through discovery", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "response_mode": { + "description": "The response mode we ask the provider to use for the callback", + "anyOf": [ + { + "$ref": "#/definitions/ResponseMode" + }, + { + "type": "null" + } + ] + }, + "claims_imports": { + "description": "How claims should be imported from the `id_token` provided by the\n provider", + "allOf": [ + { + "$ref": "#/definitions/ClaimsImports" + } + ] + }, + "additional_authorization_parameters": { + "description": "Additional parameters to include in the authorization request\n\n Orders of the keys are not preserved.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "forward_login_hint": { + "description": "Whether the `login_hint` should be forwarded to the provider in the\n authorization request.\n\n Defaults to `false`.", + "type": "boolean", + "default": false + }, + "on_backchannel_logout": { + "description": "What to do when receiving an OIDC Backchannel logout request.\n\n Defaults to `do_nothing`.", + "allOf": [ + { + "$ref": "#/definitions/OnBackchannelLogout" + } + ] + } + }, + "required": [ + "id", + "client_id", + "token_endpoint_auth_method" + ] + }, + "TokenAuthMethod": { + "description": "Authentication methods used against the OAuth 2.0 provider", + "oneOf": [ + { + "description": "`none`: No authentication", + "type": "string", + "const": "none" + }, + { + "description": "`client_secret_basic`: `client_id` and `client_secret` used as basic\n authorization credentials", + "type": "string", + "const": "client_secret_basic" + }, + { + "description": "`client_secret_post`: `client_id` and `client_secret` sent in the\n request body", + "type": "string", + "const": "client_secret_post" + }, + { + "description": "`client_secret_jwt`: a 
`client_assertion` sent in the request body and\n signed using the `client_secret`", + "type": "string", + "const": "client_secret_jwt" + }, + { + "description": "`private_key_jwt`: a `client_assertion` sent in the request body and\n signed by an asymmetric key", + "type": "string", + "const": "private_key_jwt" + }, + { + "description": "`sign_in_with_apple`: a special method for Signin with Apple", + "type": "string", + "const": "sign_in_with_apple" + } + ] + }, + "SignInWithApple": { + "type": "object", + "properties": { + "private_key_file": { + "description": "The private key file used to sign the `id_token`", + "type": [ + "string", + "null" + ] + }, + "private_key": { + "description": "The private key used to sign the `id_token`", + "type": [ + "string", + "null" + ] + }, + "team_id": { + "description": "The Team ID of the Apple Developer Portal", + "type": "string" + }, + "key_id": { + "description": "The key ID of the Apple Developer Portal", + "type": "string" + } + }, + "required": [ + "team_id", + "key_id" + ] + }, + "DiscoveryMode": { + "description": "How to discover the provider's configuration", + "oneOf": [ + { + "description": "Use OIDC discovery with strict metadata verification", + "type": "string", + "const": "oidc" + }, + { + "description": "Use OIDC discovery with relaxed metadata verification", + "type": "string", + "const": "insecure" + }, + { + "description": "Use a static configuration", + "type": "string", + "const": "disabled" + } + ] + }, + "PkceMethod": { + "description": "Whether to use proof key for code exchange (PKCE) when requesting and\n exchanging the token.", + "oneOf": [ + { + "description": "Use PKCE if the provider supports it\n\n Defaults to no PKCE if provider discovery is disabled", + "type": "string", + "const": "auto" + }, + { + "description": "Always use PKCE with the S256 challenge method", + "type": "string", + "const": "always" + }, + { + "description": "Never use PKCE", + "type": "string", + "const": "never" + } + 
] + }, + "ResponseMode": { + "description": "The response mode we ask the provider to use for the callback", + "oneOf": [ + { + "description": "`query`: The provider will send the response as a query string in the\n URL search parameters", + "type": "string", + "const": "query" + }, + { + "description": "`form_post`: The provider will send the response as a POST request with\n the response parameters in the request body\n\n ", + "type": "string", + "const": "form_post" + } + ] + }, + "ClaimsImports": { + "description": "How claims should be imported", + "type": "object", + "properties": { + "subject": { + "description": "How to determine the subject of the user", + "allOf": [ + { + "$ref": "#/definitions/SubjectImportPreference" + } + ] + }, + "skip_confirmation": { + "description": "Whether to skip the interactive screen prompting the user to confirm the\n attributes that are being imported. This requires `localpart.action` to\n be `require` and other attribute actions to be either `ignore`, `force`\n or `require`", + "type": "boolean" + }, + "localpart": { + "description": "Import the localpart of the MXID", + "allOf": [ + { + "$ref": "#/definitions/LocalpartImportPreference" + } + ] + }, + "displayname": { + "description": "Import the displayname of the user.", + "allOf": [ + { + "$ref": "#/definitions/DisplaynameImportPreference" + } + ] + }, + "email": { + "description": "Import the email address of the user", + "allOf": [ + { + "$ref": "#/definitions/EmailImportPreference" + } + ] + }, + "account_name": { + "description": "Set a human-readable name for the upstream account for display purposes", + "allOf": [ + { + "$ref": "#/definitions/AccountNameImportPreference" + } + ] + } + } + }, + "SubjectImportPreference": { + "description": "What should be done for the subject attribute", + "type": "object", + "properties": { + "template": { + "description": "The Jinja2 template to use for the subject attribute\n\n If not provided, the default template is `{{ 
user.sub }}`", + "type": [ + "string", + "null" + ] + } + } + }, + "LocalpartImportPreference": { + "description": "What should be done for the localpart attribute", + "type": "object", + "properties": { + "action": { + "description": "How to handle the attribute", + "allOf": [ + { + "$ref": "#/definitions/ImportAction" + } + ] + }, + "template": { + "description": "The Jinja2 template to use for the localpart attribute\n\n If not provided, the default template is `{{ user.preferred_username }}`", + "type": [ + "string", + "null" + ] + }, + "on_conflict": { + "description": "How to handle conflicts on the claim, default value is `Fail`", + "allOf": [ + { + "$ref": "#/definitions/OnConflict" + } + ] + } + } + }, + "ImportAction": { + "description": "How to handle a claim", + "oneOf": [ + { + "description": "Ignore the claim", + "type": "string", + "const": "ignore" + }, + { + "description": "Suggest the claim value, but allow the user to change it", + "type": "string", + "const": "suggest" + }, + { + "description": "Force the claim value, but don't fail if it is missing", + "type": "string", + "const": "force" + }, + { + "description": "Force the claim value, and fail if it is missing", + "type": "string", + "const": "require" + } + ] + }, + "OnConflict": { + "description": "How to handle an existing localpart claim", + "oneOf": [ + { + "description": "Fails the upstream OAuth 2.0 login on conflict", + "type": "string", + "const": "fail" + }, + { + "description": "Adds the upstream OAuth 2.0 identity link, regardless of whether there\n is an existing link or not", + "type": "string", + "const": "add" + }, + { + "description": "Replace any existing upstream OAuth 2.0 identity link", + "type": "string", + "const": "replace" + }, + { + "description": "Adds the upstream OAuth 2.0 identity link *only* if there is no existing\n link for this provider on the matching user", + "type": "string", + "const": "set" + } + ] + }, + "DisplaynameImportPreference": { + 
"description": "What should be done for the displayname attribute", + "type": "object", + "properties": { + "action": { + "description": "How to handle the attribute", + "allOf": [ + { + "$ref": "#/definitions/ImportAction" + } + ] + }, + "template": { + "description": "The Jinja2 template to use for the displayname attribute\n\n If not provided, the default template is `{{ user.name }}`", + "type": [ + "string", + "null" + ] + } + } + }, + "EmailImportPreference": { + "description": "What should be done with the email attribute", + "type": "object", + "properties": { + "action": { + "description": "How to handle the claim", + "allOf": [ + { + "$ref": "#/definitions/ImportAction" + } + ] + }, + "template": { + "description": "The Jinja2 template to use for the email address attribute\n\n If not provided, the default template is `{{ user.email }}`", + "type": [ + "string", + "null" + ] + } + } + }, + "AccountNameImportPreference": { + "description": "What should be done for the account name attribute", + "type": "object", + "properties": { + "template": { + "description": "The Jinja2 template to use for the account name. 
This name is only used\n for display purposes.\n\n If not provided, it will be ignored.", + "type": [ + "string", + "null" + ] + } + } + }, + "OnBackchannelLogout": { + "description": "What to do when receiving an OIDC Backchannel logout request.", + "oneOf": [ + { + "description": "Do nothing", + "type": "string", + "const": "do_nothing" + }, + { + "description": "Only log out the MAS 'browser session' started by this OIDC session", + "type": "string", + "const": "logout_browser_only" + }, + { + "description": "Log out all sessions started by this OIDC session, including MAS\n 'browser sessions' and client sessions", + "type": "string", + "const": "logout_all" + } + ] + }, + "BrandingConfig": { + "description": "Configuration section for tweaking the branding of the service", + "type": "object", + "properties": { + "service_name": { + "description": "A human-readable name. Defaults to the server's address.", + "type": [ + "string", + "null" + ] + }, + "policy_uri": { + "description": "Link to a privacy policy, displayed in the footer of web pages and\n emails. It is also advertised to clients through the `op_policy_uri`\n OIDC provider metadata.", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "tos_uri": { + "description": "Link to a terms of service document, displayed in the footer of web\n pages and emails. 
It is also advertised to clients through the\n `op_tos_uri` OIDC provider metadata.", + "type": [ + "string", + "null" + ], + "format": "uri" + }, + "imprint": { + "description": "Legal imprint, displayed in the footer in the footer of web pages and\n emails.", + "type": [ + "string", + "null" + ] + }, + "logo_uri": { + "description": "Logo displayed in some web pages.", + "type": [ + "string", + "null" + ], + "format": "uri" + } + } + }, + "CaptchaConfig": { + "description": "Configuration section to setup CAPTCHA protection on a few operations", + "type": "object", + "properties": { + "service": { + "description": "Which service should be used for CAPTCHA protection", + "anyOf": [ + { + "$ref": "#/definitions/CaptchaServiceKind" + }, + { + "type": "null" + } + ] + }, + "site_key": { + "description": "The site key to use", + "type": [ + "string", + "null" + ] + }, + "secret_key": { + "description": "The secret key to use", + "type": [ + "string", + "null" + ] + } + } + }, + "CaptchaServiceKind": { + "description": "Which service should be used for CAPTCHA protection", + "oneOf": [ + { + "description": "Use Google's reCAPTCHA v2 API", + "type": "string", + "const": "recaptcha_v2" + }, + { + "description": "Use Cloudflare Turnstile", + "type": "string", + "const": "cloudflare_turnstile" + }, + { + "description": "Use ``HCaptcha``", + "type": "string", + "const": "hcaptcha" + } + ] + }, + "AccountConfig": { + "description": "Configuration section to configure features related to account management", + "type": "object", + "properties": { + "email_change_allowed": { + "description": "Whether users are allowed to change their email addresses. Defaults to\n `true`.", + "type": "boolean" + }, + "displayname_change_allowed": { + "description": "Whether users are allowed to change their display names. 
Defaults to\n `true`.\n\n This should be in sync with the policy in the homeserver configuration.", + "type": "boolean" + }, + "password_registration_enabled": { + "description": "Whether to enable self-service password registration. Defaults to\n `false` if password authentication is enabled.\n\n This has no effect if password login is disabled.", + "type": "boolean" + }, + "password_registration_email_required": { + "description": "Whether self-service password registrations require a valid email.\n Defaults to `true`.\n\n This has no effect if password registration is disabled.", + "type": "boolean" + }, + "password_change_allowed": { + "description": "Whether users are allowed to change their passwords. Defaults to `true`.\n\n This has no effect if password login is disabled.", + "type": "boolean" + }, + "password_recovery_enabled": { + "description": "Whether email-based password recovery is enabled. Defaults to `false`.\n\n This has no effect if password login is disabled.", + "type": "boolean" + }, + "account_deactivation_allowed": { + "description": "Whether users are allowed to delete their own account. Defaults to\n `true`.", + "type": "boolean" + }, + "login_with_email_allowed": { + "description": "Whether users can log in with their email address. Defaults to `false`.\n\n This has no effect if password login is disabled.", + "type": "boolean" + }, + "registration_token_required": { + "description": "Whether registration tokens are required for password registrations.\n Defaults to `false`.\n\n When enabled, users must provide a valid registration token during\n password registration. 
This has no effect if password registration\n is disabled.", + "type": "boolean" + } + } + }, + "ExperimentalConfig": { + "description": "Configuration sections for experimental options\n\n Do not change these options unless you know what you are doing.", + "type": "object", + "properties": { + "access_token_ttl": { + "description": "Time-to-live of access tokens in seconds. Defaults to 5 minutes.", + "type": "integer", + "format": "uint64", + "minimum": 60, + "maximum": 86400 + }, + "compat_token_ttl": { + "description": "Time-to-live of compatibility access tokens in seconds. Defaults to 5\n minutes.", + "type": "integer", + "format": "uint64", + "minimum": 60, + "maximum": 86400 + }, + "inactive_session_expiration": { + "description": "Experimetal feature to automatically expire inactive sessions\n\n Disabled by default", + "anyOf": [ + { + "$ref": "#/definitions/InactiveSessionExpirationConfig" + }, + { + "type": "null" + } + ] + }, + "plan_management_iframe_uri": { + "description": "Experimental feature to show a plan management tab and iframe.\n This value is passed through \"as is\" to the client without any\n validation.", + "type": [ + "string", + "null" + ] + }, + "session_limit": { + "description": "Experimental feature to limit the number of application sessions per\n user.\n\n Disabled by default.", + "anyOf": [ + { + "$ref": "#/definitions/SessionLimitConfig" + }, + { + "type": "null" + } + ] + } + } + }, + "InactiveSessionExpirationConfig": { + "description": "Configuration options for the inactive session expiration feature", + "type": "object", + "properties": { + "ttl": { + "description": "Time after which an inactive session is automatically finished", + "type": "integer", + "format": "uint64", + "minimum": 600, + "maximum": 7776000 + }, + "expire_compat_sessions": { + "description": "Should compatibility sessions expire after inactivity", + "type": "boolean", + "default": true + }, + "expire_oauth_sessions": { + "description": "Should OAuth 2.0 
sessions expire after inactivity", + "type": "boolean", + "default": true + }, + "expire_user_sessions": { + "description": "Should user sessions expire after inactivity", + "type": "boolean", + "default": true + } + }, + "required": [ + "ttl" + ] + }, + "SessionLimitConfig": { + "description": "Configuration options for the session limit feature", + "type": "object", + "properties": { + "soft_limit": { + "type": "integer", + "format": "uint64", + "minimum": 1 + }, + "hard_limit": { + "type": "integer", + "format": "uint64", + "minimum": 1 + } + }, + "required": [ + "soft_limit", + "hard_limit" + ] + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/docs/development/architecture.md b/matrix-authentication-service/docs/development/architecture.md new file mode 100644 index 00000000..0b0f14ef --- /dev/null +++ b/matrix-authentication-service/docs/development/architecture.md @@ -0,0 +1,113 @@ +# Architecture + +The service is meant to be easily embeddable, with only a dependency to a database. +It is also meant to stay lightweight in terms of resource usage and easily scalable horizontally. + +## Scope and goals + +The Matrix Authentication Service has been created to support the migration of Matrix to an OpenID Connect (OIDC) based architecture as per [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861). + +It is not intended to be a general purpose Identity Provider (IdP) and instead focuses on the specific needs of Matrix. + +Furthermore, it is only intended that it would speak OIDC for authentication and not other protocols. Instead, if you want to connect to an upstream SAML, CAS or LDAP backend then you need to pair MAS with a separate service (such as [Dex](https://dexidp.io) or [Keycloak](https://www.keycloak.org)) which does that translation for you. + +Whilst it only supports use with Synapse today, we hope that other homeservers will become supported in future. 
+ +If you need some other feature that MAS doesn't support (such as TOTP or WebAuthn), then you should consider pairing MAS with another IdP that does support the features you need. + +## Workspace and crate split + +The whole repository is a [Cargo Workspace](https://doc.rust-lang.org/book/ch14-03-cargo-workspaces.html) that includes multiple crates under the `/crates` directory. + +This includes: + + - `mas-cli`: Command line utility, main entry point + - [`mas-config`][mas-config]: Configuration parsing and loading + - [`mas-data-model`][mas-data-model]: Models of objects that live in the database, regardless of the storage backend + - [`mas-email`][mas-email]: High-level email sending abstraction + - [`mas-handlers`][mas-handlers]: Main HTTP application logic + - [`mas-iana`][mas-iana]: Auto-generated enums from IANA registries + - [`mas-iana-codegen`][mas-iana-codegen]: Code generator for the `mas-iana` crate + - [`mas-jose`][mas-jose]: JWT/JWS/JWE/JWK abstraction + - [`mas-static-files`][mas-static-files]: Frontend static files (CSS/JS). Includes some frontend tooling + - [`mas-storage`][mas-storage]: Abstraction of the storage backends + - [`mas-storage-pg`][mas-storage-pg]: Storage backend implementation for a PostgreSQL database + - [`mas-tasks`][mas-tasks]: Asynchronous task runner and scheduler + - [`oauth2-types`][oauth2-types]: Useful structures and types to deal with OAuth 2.0/OpenID Connect endpoints. This might end up published as a standalone library as it can be useful in other contexts. 
+ +[mas-config]: ../rustdoc/mas_config/index.html +[mas-data-model]: ../rustdoc/mas_data_model/index.html +[mas-email]: ../rustdoc/mas_email/index.html +[mas-handlers]: ../rustdoc/mas_handlers/index.html +[mas-iana]: ../rustdoc/mas_iana/index.html +[mas-iana-codegen]: ../rustdoc/mas_iana_codegen/index.html +[mas-jose]: ../rustdoc/mas_jose/index.html +[mas-static-files]: ../rustdoc/mas_static_files/index.html +[mas-storage]: ../rustdoc/mas_storage/index.html +[mas-storage-pg]: ../rustdoc/mas_storage_pg/index.html +[mas-tasks]: ../rustdoc/mas_tasks/index.html +[oauth2-types]: ../rustdoc/oauth2_types/index.html + +## Important crates + +The project makes use of a few important crates. + +### Async runtime: `tokio` + +[Tokio](https://tokio.rs/) is the async runtime used by the project. +The choice of runtime does not have much impact on most of the code. + +It has an impact when: + + - spawning asynchronous work (as in "not awaiting on it immediately") + - running CPU-intensive tasks. They should be run in a blocking context using `tokio::task::spawn_blocking`. This includes password hashing and other crypto operations. + - when dealing with shared memory, e.g. mutexes, rwlocks, etc. + +### Logging: `tracing` + +Logging is handled through the [`tracing`](https://docs.rs/tracing/*/tracing/) crate. +It provides a way to emit structured log messages at various levels. + +```rust +use tracing::{info, debug}; + +info!("Logging some things"); +debug!(user = "john", "Structured stuff"); +``` + +`tracing` also provides ways to create spans to better understand where a logging message comes from. +In the future, it will help build OpenTelemetry-compatible distributed traces to help with debugging. + +`tracing` is becoming the standard way to log things in Rust. +By itself it will do nothing unless a subscriber is installed to -for example- log the events to the console. 
+ +The CLI installs [`tracing-subscriber`](https://docs.rs/tracing-subscriber/*/tracing_subscriber/) on startup to log in the console. +It looks for a `RUST_LOG` environment variable to determine which events should be logged. + +### Error management: `thiserror` / `anyhow` + +[`thiserror`](https://docs.rs/thiserror/*/thiserror/) helps define custom error types. +This is especially useful for errors that should be handled in a specific way, while being able to augment underlying errors with additional context. + +[`anyhow`](https://docs.rs/anyhow/*/anyhow/) helps deal with chains of errors. +It allows for quickly adding additional context around an error while it is being propagated. + +Both crates work well together and complement each other. + +### Database interactions: `sqlx` + +Interactions with the database are done through [`sqlx`](https://github.com/launchbadge/sqlx), an async, pure-Rust SQL library with compile-time check of queries. +It also handles schema migrations. + +### Templates: `tera` + +[Tera](https://tera.netlify.app/) was chosen as the template engine for its simplicity as well as its ability to load templates at runtime. +The builtin templates are embedded in the final binary through some macro magic. + +The downside of Tera compared to compile-time template engines is the possibility of runtime crashes. +This can however be somewhat mitigated with unit tests. + +### Crates from *RustCrypto* + +The [RustCrypto team](https://github.com/RustCrypto) offers high-quality, independent crates for dealing with cryptography. +The whole project is highly modular and APIs are coherent between crates.
diff --git a/matrix-authentication-service/docs/development/cleanup-jobs.md b/matrix-authentication-service/docs/development/cleanup-jobs.md new file mode 100644 index 00000000..f64cb7f2 --- /dev/null +++ b/matrix-authentication-service/docs/development/cleanup-jobs.md @@ -0,0 +1,280 @@ +# Cleanup Jobs + +In MAS, most of the data are initially only soft-deleted, by setting a `deleted_at`, `finished_at`, `consumed_at` timestamp on the row, instead of actually deleting the row. +They are kept around for a short period of time, for audit purposes or to help with the user experience in some cases. +This document describes the cleanup jobs in MAS which delete those stale rows after some time, including how to add new cleanup jobs and understand the existing ones. + +## Cleanup Job Architecture + +Cleanup jobs are scheduled tasks that hard-delete old data from the database. They follow a consistent pattern: + +1. **Job struct** in `crates/storage/src/queue/tasks.rs` - Defines the job and queue name +2. **Storage trait** in `crates/storage/src/{domain}/` - Declares the cleanup method interface +3. **PostgreSQL implementation** in `crates/storage-pg/src/{domain}/` - Implements the actual cleanup logic +4. **Job runner** in `crates/tasks/src/cleanup/` - Implements the `RunnableJob` trait with batching logic +5.
**Registration** in `crates/tasks/src/lib.rs` - Registers the handler and schedules execution + +### Module Structure + +The cleanup job implementations are organized into submodules by domain: + +``` +crates/tasks/src/cleanup/ +├── mod.rs # Re-exports, shared BATCH_SIZE constant +├── tokens.rs # OAuth token cleanup (access and refresh tokens) +├── sessions.rs # Session cleanup (compat, OAuth2, user sessions and their IPs) +├── oauth.rs # OAuth grants and upstream OAuth cleanup +├── user.rs # User-related cleanup (registrations, recovery, email auth) +└── misc.rs # Queue jobs, policy data cleanup +``` + +## All Cleanup Jobs + +| Job | Entity | Retention | Notes | +|-----|--------|-----------|-------| +| `CleanupRevokedOAuthAccessTokensJob` | `oauth2_access_tokens` | 1 hour after `revoked_at` | | +| `CleanupExpiredOAuthAccessTokensJob` | `oauth2_access_tokens` | 30 days after `expires_at` | For idempotency | +| `CleanupRevokedOAuthRefreshTokensJob` | `oauth2_refresh_tokens` | 1 hour after `revoked_at` | | +| `CleanupConsumedOAuthRefreshTokensJob` | `oauth2_refresh_tokens` | 1 hour after `consumed_at` | | +| `CleanupUserRegistrationsJob` | `user_registrations` | 30 days | For abuse investigation | +| `CleanupFinishedCompatSessionsJob` | `compat_sessions` | 30 days after `finished_at` | Cascades to tokens | +| `CleanupFinishedOAuth2SessionsJob` | `oauth2_sessions` | 30 days after `finished_at` | Cascades to tokens | +| `CleanupFinishedUserSessionsJob` | `user_sessions` | 30 days after `finished_at` | Only if no child sessions | +| `CleanupOAuthAuthorizationGrantsJob` | `oauth2_authorization_grants` | 7 days | | +| `CleanupOAuthDeviceCodeGrantsJob` | `oauth2_device_code_grant` | 7 days | | +| `CleanupUserRecoverySessionsJob` | `user_recovery_sessions` | 7 days | Codes expire in 10 min | +| `CleanupUserEmailAuthenticationsJob` | `user_email_authentications` | 7 days | Codes expire in 10 min | +| `CleanupUpstreamOAuthSessionsJob` | `upstream_oauth_authorization_sessions` 
| 7 days (orphaned) | Where `user_session_id IS NULL` | +| `CleanupUpstreamOAuthLinksJob` | `upstream_oauth_links` | 7 days (orphaned) | Where `user_id IS NULL` | +| `CleanupInactiveOAuth2SessionIpsJob` | `oauth2_sessions.last_active_ip` | 30 days | Clears out IPs after inactivity | +| `CleanupInactiveCompatSessionIpsJob` | `compat_sessions.last_active_ip` | 30 days | Clears out IPs after inactivity | +| `CleanupInactiveUserSessionIpsJob` | `user_sessions.last_active_ip` | 30 days | Clears out IPs after inactivity | +| `CleanupQueueJobsJob` | `queue_jobs` | 30 days | Completed/failed jobs | + +## Session Cleanup and Backchannel Logout + +The session cleanup jobs must preserve the dependency chain required for backchannel logout to work correctly. + +### Backchannel Logout Flow + +When an upstream IdP sends a backchannel logout notification, MAS must trace through the session hierarchy to find and finish all related sessions: + +``` + Upstream IdP logout notification + │ + ▼ + ┌───────────────────────────────────────┐ + │ upstream_oauth_authorization_sessions │ + │ (matched by sub/sid claims) │ + └──────────────┬────────────────────────┘ + │ user_session_id + ▼ + ┌─────────────────────────────────────┐ + │ user_sessions │ + │ (browser sessions) │ + └──────────────┬──────────────────────┘ + │ user_session_id FK + ┌────┴──────────────┐ + │ │ + ▼ ▼ + ┌─────────────────┐ ┌─────────────────┐ + │ compat_sessions │ │ oauth2_sessions │ + └─────────────────┘ └─────────────────┘ +``` + +### Cleanup Order + +The cleanup jobs run in an order that respects this hierarchy: + +1. **Compat sessions** (`CleanupFinishedCompatSessionsJob`) + - Also deletes `compat_access_tokens`, `compat_refresh_tokens` +1. **OAuth2 sessions** (`CleanupFinishedOAuth2SessionsJob`) + - Also deletes `oauth2_access_tokens`, `oauth2_refresh_tokens` +1. **User sessions** (`CleanupFinishedUserSessionsJob`) + - Only deletes if NO `compat_sessions` or `oauth2_sessions` reference it. 
+ This can make this job inefficient if there are lots of finished `user_sessions` that are still referenced by active `compat_sessions` or `oauth2_sessions`. + - Also deletes `user_session_authentications` + - Cascades to `SET NULL` the `user_session_id` on `upstream_oauth_authorization_sessions` +1. **Upstream OAuth authorization sessions** (`CleanupUpstreamOAuthSessionsJob`) + - Only deletes if `user_session_id` is `NULL`, so if the authentication session was never finished *or* the user session was cleaned up. + +### Why User Sessions Require Special Handling + +The `user_session_id` foreign key links must be preserved for backchannel logout to work: + +1. **Backchannel logout** traces: `upstream_oauth_authorization_sessions` → `user_sessions` → `compat_sessions`/`oauth2_sessions` +2. If `user_sessions` is deleted while child sessions exist, the link is broken and logout propagation fails +3. The `NOT EXISTS` checks in the cleanup query ensure we only delete `user_sessions` after all children are cleaned up +4. FK constraints (`ON DELETE NO ACTION`) provide a safety net - attempting to delete a referenced `user_session` will fail + +## Adding a New Cleanup Job + +### 1. Add Job Struct + +In `crates/storage/src/queue/tasks.rs`: + +```rust +/// Cleanup old foo records +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +pub struct CleanupFooJob; + +impl InsertableJob for CleanupFooJob { + const QUEUE_NAME: &'static str = "cleanup-foo"; +} +``` + +### 2. Add Storage Trait Method + +In `crates/storage/src/{domain}/foo.rs`, add to the repository trait and `repository_impl!` macro: + +```rust +async fn cleanup( + &mut self, + since: Option<DateTime<Utc>>, + until: DateTime<Utc>, + limit: usize, +) -> Result<(usize, Option<DateTime<Utc>>), Self::Error>; +``` + +### 3.
Implement in PostgreSQL + +In `crates/storage-pg/src/{domain}/foo.rs`, use the CTE pattern: + +```rust +async fn cleanup( + &mut self, + since: Option<DateTime<Utc>>, + until: DateTime<Utc>, + limit: usize, +) -> Result<(usize, Option<DateTime<Utc>>), Self::Error> { + let res = sqlx::query!( + r#" + WITH + to_delete AS ( + SELECT id, timestamp_col + FROM table + WHERE timestamp_col IS NOT NULL + AND ($1::timestamptz IS NULL OR timestamp_col >= $1) + AND timestamp_col < $2 + ORDER BY timestamp_col ASC + LIMIT $3 + FOR UPDATE + ), + deleted AS ( + DELETE FROM table USING to_delete + WHERE table.id = to_delete.id + RETURNING timestamp_col + ) + SELECT COUNT(*) as "count!", MAX(timestamp_col) as last_timestamp FROM deleted + "#, + since, + until, + limit as i64, + ) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + Ok(( + res.count.try_into().unwrap_or(usize::MAX), + res.last_timestamp, + )) +} +``` + +### 4. Add Index Migration + +Make sure to add an index on that timestamp column used by this cleanup job: + +```sql +-- no-transaction +CREATE INDEX CONCURRENTLY IF NOT EXISTS "table_timestamp_idx" + ON "table" ("timestamp_col") + WHERE "timestamp_col" IS NOT NULL; +``` + +The partial index (`WHERE timestamp_col IS NOT NULL`) makes queries more efficient by only indexing rows that will actually be cleaned up. + +### 5.
Implement RunnableJob + +In the appropriate submodule under `crates/tasks/src/cleanup/` (e.g., `tokens.rs`, `sessions.rs`, `oauth.rs`, `user.rs`, or `misc.rs`): + +```rust +#[async_trait] +impl RunnableJob for CleanupFooJob { + #[tracing::instrument(name = "job.cleanup_foo", skip_all)] + async fn run(&self, state: &State, context: JobContext) -> Result<(), JobError> { + // Cleanup records older than X days + let until = state.clock.now() - chrono::Duration::days(30); + let mut total = 0; + + let mut since = None; + while !context.cancellation_token.is_cancelled() { + let mut repo = state.repository().await.map_err(JobError::retry)?; + + let (count, last_timestamp) = repo + .foo() + .cleanup(since, until, BATCH_SIZE) + .await + .map_err(JobError::retry)?; + repo.save().await.map_err(JobError::retry)?; + + since = last_timestamp; + total += count; + + if count != BATCH_SIZE { + break; + } + } + + if total == 0 { + debug!("no foo records to clean up"); + } else { + info!(count = total, "cleaned up foo records"); + } + + Ok(()) + } + + fn timeout(&self) -> Option<Duration> { + Some(Duration::from_secs(10 * 60)) + } +} +``` + +### 6.
Register and Schedule + +In `crates/tasks/src/lib.rs`: + +```rust +// Add to register_handler chain +.register_handler::<CleanupFooJob>() + +// Add schedule +.add_schedule( + "cleanup-foo", + // Run this job every hour + "0 XX * * * *".parse()?, // Choose a minute offset + mas_storage::queue::CleanupFooJob, +) +``` + +## Implementation Notes + +### Batching Pattern + +All cleanup jobs use a batching pattern to avoid long-running transactions: + +- Process records in batches (typically 1000 at a time) +- Use pagination cursor (`since`) to track progress +- Create a new transaction for each batch +- Check for cancellation between batches +- Log total count at the end + +### Retention Policies + +Retention periods vary by use case: + +- **1 hour**: Revoked/consumed tokens (no longer useful) +- **7 days**: Short-lived grants/codes (abuse investigation) +- **30 days**: Sessions and registrations (longer audit trail) diff --git a/matrix-authentication-service/docs/development/contributing.md b/matrix-authentication-service/docs/development/contributing.md new file mode 100644 index 00000000..27367b94 --- /dev/null +++ b/matrix-authentication-service/docs/development/contributing.md @@ -0,0 +1,124 @@ +# Contributing + +This document aims to get you started with contributing to the Matrix Authentication Service! + +## 1. Who can contribute to MAS? + +Everyone is welcome to contribute code to [Matrix Authentication Service](https://github.com/element-hq/matrix-authentication-service), provided that they are willing to license their contributions to Element under a [Contributor License Agreement](https://cla-assistant.io/element-hq/matrix-authentication-service) (CLA). This ensures that their contribution will be made available under an OSI-approved open-source license, currently Affero General Public License v3 (AGPLv3). + +Please see the [Element blog post](https://element.io/blog/synapse-now-lives-at-github-com-element-hq-synapse/) for the full rationale. + +## 2. What can I contribute?
+ +There are two main ways to contribute to MAS: + +- **Code and documentation**: You can contribute code to the Matrix Authentication Service and help improve its documentation by submitting pull requests to the [GitHub repository](https://github.com/element-hq/matrix-authentication-service). +- **Translations**: You can contribute translations to the Matrix Authentication Service through [Localazy](https://localazy.com/p/matrix-authentication-service). + +## 3. What do I need? + +To get MAS running locally from source you will need to: + +- [Install Rust and Cargo](https://www.rust-lang.org/learn/get-started). We recommend using the latest stable version of Rust. +- [Install Node.js and npm](https://nodejs.org/). We recommend using the latest LTS version of Node.js. +- [Install Open Policy Agent](https://www.openpolicyagent.org/docs#1-download-opa) + +## 4. Get the source + +The preferred and easiest way to contribute changes is to fork the relevant project on GitHub and then [create a pull request]( https://help.github.com/articles/using-pull-requests/) to ask us to pull your changes into our repo. + +Please base your changes on the `main` branch. + +```sh +git clone git@github.com:YOUR_GITHUB_USER_NAME/matrix-authentication-service.git +cd matrix-authentication-service +git checkout main +``` + +If you need help getting started with git, this is beyond the scope of the document, but you can find many good git tutorials on the web. + +## 5. Build and run MAS + +- Build the frontend + ```sh + cd frontend + npm ci # Install the frontend dependencies + npm run build # Build the frontend + cd .. + ``` +- Build the Open Policy Agent policies + ```sh + cd policies + make + # OR, if you don't have `opa` installed and want to build through the OPA docker image + make DOCKER=1 + cd .. 
+ ``` +- Generate the sample config via `cargo run -- config generate > config.yaml` +- Run a PostgreSQL database locally + ```sh + docker run -p 5432:5432 -e 'POSTGRES_USER=postgres' -e 'POSTGRES_PASSWORD=postgres' -e 'POSTGRES_DATABASE=postgres' postgres + ``` +- Update the database URI in `config.yaml` to `postgresql://postgres:postgres@localhost/postgres` +- Run the server via `cargo run -- server -c config.yaml` +- Go to + +## 6. Update generated files and format your code + +The project includes a few files that are automatically generated. +Most of them can be updated by running `sh misc/update.sh` at the root of the project. + +Make sure your code adheres to our Rust and TypeScript code style by running: + + - `cargo +nightly fmt` (with the nightly toolchain installed) + - `npm run format` in the `frontend` directory + - `make fmt` in the `policies` directory (if changed) + +When updating SQL queries in the `crates/storage-pg/` crate, you may need to update the `sqlx` introspection data. To do this, make sure to install `cargo-sqlx` (`cargo install sqlx-cli`) and: + + - Apply the latest migrations: `cargo sqlx migrate run` from the `crates/storage-pg/` directory. + - Update the `sqlx` introspection data: `cargo sqlx prepare` from the `crates/storage-pg/` directory. + +## 7. Test, test, test! + +While you're developing and before submitting a patch, you'll want to test your code and adhere to many code style and linting guidelines. + +### Run the linters + +- Run `cargo clippy --workspace` to lint the Rust code. +- Run `npm run lint` in the `frontend` directory to lint the frontend code. +- Run `make fmt` and `make lint` in the `policies` directory to format and lint the included policy. + +### Run the tests + +If you haven't already, install [Cargo-Nextest](https://nexte.st/docs/installation/pre-built-binaries/). + +- Run the tests to the backend by running `cargo nextest run --workspace`. 
This requires a connection to a PostgreSQL database, set via the `DATABASE_URL` environment variable. +- Run the tests to the frontend by running `npm run test` in the `frontend` directory. +- To run the tests for the included policy, change to the `policies` directory and run one of: + - `make test` (needs Open Policy Agent installed) + - `make PODMAN=1 test` (runs inside a container; needs Podman installed) + - `make DOCKER=1 test` (runs inside a container; needs Docker installed) + +## 8. Submit a pull request + +Once you've made changes, you're ready to submit a pull request. + +When the pull request is opened, you will see a few things: + + 1. Our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests, and more. + 1. One or more of the developers will take a look at your pull request and offer feedback. + +From this point, you should: + + 1. Look at the results of the CI pipeline. + - If there is any error, fix the error. + 1. If a developer has requested changes, make these changes and let us know when it is ready for a developer to review again. + - A pull request is a conversation; if you disagree with the suggestions, please respond and discuss it. + 1. Create a new commit with the changes. + - Please do *not* overwrite the history. New commits make the reviewer's life easier. + - Push these commits to your pull request. + 1. Back to 1. + 1. Once the pull request is ready for review again, please **re-request review** from whichever developer did your initial review (or leave a comment in the pull request that you believe all required changes have been made). + +Once both the CI and the developers are happy, the patch will be merged into Matrix Authentication Service and released shortly! 
diff --git a/matrix-authentication-service/docs/development/database.md b/matrix-authentication-service/docs/development/database.md new file mode 100644 index 00000000..2deafce2 --- /dev/null +++ b/matrix-authentication-service/docs/development/database.md @@ -0,0 +1,78 @@ +# Database + +Interactions with the database go through `sqlx`. +It provides async database operations with connection pooling, migrations support and compile-time check of queries through macros. + +## Writing database interactions + +All database interactions are done through repository traits. Each repository trait usually manages one type of data, defined in the [`mas-data-model`][mas-data-model] crate. + +Defining a new data type and associated repository looks like this: + + - Define new structs in [`mas-data-model`][mas-data-model] crate + - Define the repository trait in [`mas-storage`][mas-storage] crate + - Make that repository trait available via the `RepositoryAccess` trait in [`mas-storage`][mas-storage] crate + - Set up the database schema by writing a migration file in [`mas-storage-pg`][mas-storage-pg] crate + - Implement the new repository trait in [`mas-storage-pg`][mas-storage-pg] crate + - Write tests for the PostgreSQL implementation in [`mas-storage-pg`][mas-storage-pg] crate + +Some of those steps are documented in more detail in the [`mas-storage`][mas-storage] and [`mas-storage-pg`][mas-storage-pg] crates. + +[mas-data-model]: ../rustdoc/mas_data_model/index.html +[mas-storage]: ../rustdoc/mas_storage/index.html +[mas-storage-pg]: ../rustdoc/mas_storage_pg/index.html + +## Compile-time check of queries + +To be able to check queries, `sqlx` has to introspect the live database. +Usually it does so by having the database available at compile time, but to avoid that we're using the `offline` feature of `sqlx`, which saves the introspection information as a flat file in the repository.
+ +Preparing this flat file is done through `sqlx-cli`, and should be done every time the database schema or the queries change. + +```sh +# Install the CLI +cargo install sqlx-cli --no-default-features --features postgres + +cd crates/storage-pg/ # Must be in the mas-storage-pg crate folder +export DATABASE_URL=postgresql:///matrix_auth +cargo sqlx prepare +``` + +## Migrations + +Migration files live in the `migrations` folder in the `mas-storage-pg` crate. + +```sh +cd crates/storage-pg/ # Again, in the mas-storage-pg crate folder +export DATABASE_URL=postgresql:///matrix_auth +cargo sqlx migrate run # Run pending migrations +cargo sqlx migrate add [description] # Add new migration files +``` + +Note that migrations are embedded in the final binary and can be run from the service CLI tool. + +### Removing migrations + +For various reasons, we may want to delete migrations. +In case we do, we *must* declare that migration version as allowed to be missing. +This is because on startup, MAS will validate that all the applied migrations are known, and warn if some are missing. + +To do so, get the migration version and add it to the `ALLOWED_MISSING_MIGRATIONS` array in the `mas-storage-pg` crate. + +### Modifying existing migrations + +We may want to modify existing migrations to fix mistakes. +In case we do, we *must* save the hash of the original migration file so that MAS can validate it on startup. + +To do so, extract the first 16 bytes of the existing applied migration's checksum and append it to the `ALLOWED_ALTERNATE_CHECKSUMS` array in the `mas-storage-pg` crate.
+ +```sql +SELECT version, ENCODE(SUBSTRING(checksum FOR 16), 'hex') AS short_checksum +FROM _sqlx_migrations +WHERE version = 20250410000002; +``` +``` + version | short_checksum +----------------+---------------------------------- + 20250410000002 | f2b8f120deae27e760d079a30b77eea3 +``` diff --git a/matrix-authentication-service/docs/development/graphql.md b/matrix-authentication-service/docs/development/graphql.md new file mode 100644 index 00000000..ce946f8f --- /dev/null +++ b/matrix-authentication-service/docs/development/graphql.md @@ -0,0 +1,26 @@ +# Internal GraphQL API + +> **Note:** This API used to be the way for external tools to interact with MAS. However, **external usage is now deprecated** in favour of the REST based [Admin API](../topics/admin-api.md). External access to this API will be removed in a future release. + +MAS uses an internal GraphQL API which is used by the self-service user interface (usually accessible on `/account/`), for users to manage their own account. + +The endpoint for this API can be discovered through the OpenID Connect discovery document, under the `org.matrix.matrix-authentication-service.graphql_endpoint` key. +Though it is usually hosted at `https://<mas-domain>/graphql`. + +GraphQL uses [a self-describing schema](https://github.com/element-hq/matrix-authentication-service/blob/main/frontend/schema.graphql), which means that the API can be explored in tools like the GraphQL Playground. +If enabled, MAS hosts an instance of the playground at `https://<mas-domain>/graphql/playground`. + +## Authorization + +There are two ways to authorize a request to the GraphQL API: + + - if you are requesting from the self-service user interface (or the MAS-hosted GraphQL Playground), it will use the session cookies to authorize as the current user. This mode only allows the user to access their own data, and will never provide admin access. + - else you will need to provide an OAuth 2.0 access token in the `Authorization` header, with the `Bearer` scheme.
+ +The access token must have the [`urn:mas:graphql:*`] scope to be able to access the GraphQL API. +With only this scope, the session will be authorized as the user who owns the access token, and will only be able to access their own data. + +To get full access to the GraphQL API, the access token must have the [`urn:mas:admin`] scope in addition to the [`urn:mas:graphql:*`] scope. + +[`urn:mas:graphql:*`]: ../reference/scopes.md#urnmasgraphql +[`urn:mas:admin`]: ../reference/scopes.md#urnmasadmin diff --git a/matrix-authentication-service/docs/development/releasing.md b/matrix-authentication-service/docs/development/releasing.md new file mode 100644 index 00000000..faf5b526 --- /dev/null +++ b/matrix-authentication-service/docs/development/releasing.md @@ -0,0 +1,123 @@ +# Releasing + +MAS follows the same release cadence as Synapse, meaning usually one full release cycle every two weeks, with one week of release candidates. + +## GitHub Action workflows + +There are four main GitHub Action workflows involved in releasing MAS: + +### [`translations-download` workflow] + +This workflow downloads the latest translations from [Localazy] onto the target branch. +It is intended to be run before the start of each release cycle on the main branch and before each release on the release branch. + +Before running it, make sure to review pending translations in [Localazy], enabling new languages that pass the 70% threshold. + +### [`release-branch` workflow] + +This workflow starts a new major/minor release branch and bumps the version to the next major/minor pre-version. +It will tag the version, triggering the `build` workflow for it. 
+ +The next major/minor pre-version is computed from the current version on the main branch, so it works as follows: + + - `v1.2.3` will become `v2.0.0-rc.0` for a major release + - `v1.2.3` will become `v1.3.0-rc.0` for a minor release + +The release branch will be called `release/vX.Y`, and a PR will be automatically opened to merge it into the main branch. + + +### [`release-bump` workflow] + +This workflow bumps the version on a release branch to either the next stable version or the next release candidate version. +This *cannot* be run on the main branch (and will fail if you try). + +This workflow has three meaningful inputs: + + - The release branch to bump + - Whether the release is a pre-release or not: + - If it is a pre-release, `v1.2.3-rc.0` will become `v1.2.3-rc.1`, and `v1.2.3` will become `v1.2.4-rc.0`. + - If it is not a pre-release, `v1.2.3-rc.0` will become `v1.2.3`, and `v1.2.3` will become `v1.2.4`. + - Whether the release branch should be merged back into the main branch or not. In most cases, this should be enabled unless doing a release on a previous release branch. + +### [`build` workflow] + +This workflow is automatically run in three conditions: + + - When a `v*` tag is pushed + - On the `main` branch + - When a PR is tagged with the `Z-Build-Workflow` label (**note that this doesn't work on PRs from forks**) + +In all cases, it will build and push a container image to ghcr.io and build binaries to GitHub Action assets. + +For `v*` tags: + + - It will push the container image with the `MAJOR`, `MAJOR.MINOR`, `MAJOR.MINOR.PATCH`, `sha-HASH`, and `latest` tags for stable releases. + - It will push the container image with the `MAJOR.MINOR.PATCH-rc.N` and `sha-HASH` tags for pre-releases. + - It will **draft** a release on GitHub, with generated changelogs, reference to the built container image, and pre-built binaries attached to the release. + +On the main branch: + + - It will push the container image with the `sha-HASH` and `main` tags. 
+ - It will update the [`unstable`](https://github.com/element-hq/matrix-authentication-service/releases/tag/unstable) GitHub release with the built container image and pre-built binaries. + +When a PR is tagged with the `Z-Build-Workflow` label: + + - It will push the container image with the `sha-HASH` and `pr-NUMBER` tags. + - It will comment on the PR with the built container image. + - Pre-built binaries are available in the workflow artifacts. + + +## Changelog generation + +Changelogs are automatically generated from PR titles and labels. + +The configuration for those can be found in the `.github/release.yml`, but the main labels to be aware of are: + + - `T-Defect`: Bug fixes + - `T-Enhancement`: New features + - `A-Admin-API`: Changes to the admin API + - `A-Documentation`: Documentation + - `A-I18n`: Translations + - `T-Task`: Internal changes + - `A-Dependencies`: Dependency updates + +They are calculated based on the previous release. For release candidates, this includes the previous release candidate. + +## Undrafting releases + +Releases are manually undrafted when the release is ready to be published. +At this point, the releaser should check the changelog and ensure the "Set as pre-release" and "Set as latest release" checkboxes are checked as appropriate. + +## Full release process + + - Start a new release cycle: + 1. Run the [`translations-download` workflow] on the main branch. + 1. Wait for the [translation download PR] to be automatically merged. + 1. Run the [`release-branch` workflow] on the main branch. + 1. Wait for [CI to churn] and the [draft release to appear]. This takes about 30 minutes. + 1. Double-check the changelog on the draft release. + 1. Check the "Set as pre-release" checkbox, and publish the release. + 1. Delete the N-2 release branch on [Localazy](https://localazy.com/console/branching), meaning that once the 0.16 release cycle begins, the 0.14 release branch will be deleted. 
+ - Create new release candidates if needed: + 1. Run the `translations-download` workflow on the release branch. + 1. Wait for the [translation download PR] to be automatically merged. + 1. Run the [`release-bump` workflow] on the release branch, with the `rc` input **checked**. + 1. Wait for [CI to churn] and the [draft release to appear]. This takes about 30 minutes. + 1. Double-check the changelog on the draft release. + 1. Check the "Set as pre-release" checkbox and publish the release. + - Create a new stable release: + 1. Run the [`translations-download` workflow] on the release branch + 1. Wait for the [translation download PR] to be automatically merged + 1. Run the [`release-bump` workflow] on the release branch, with the `rc` input **unchecked**. + 1. Wait for [CI to churn] and the [draft release to appear]. This takes about 30 minutes. + 1. Double-check the changelog on the draft release. + 1. Check the "Set as latest release" checkbox and publish the release. + +[Localazy]: https://localazy.com/p/matrix-authentication-service +[`translations-download` workflow]: https://github.com/element-hq/matrix-authentication-service/actions/workflows/translations-download.yaml +[`release-branch` workflow]: https://github.com/element-hq/matrix-authentication-service/actions/workflows/release-branch.yaml +[`release-bump` workflow]: https://github.com/element-hq/matrix-authentication-service/actions/workflows/release-bump.yaml +[`build` workflow]: https://github.com/element-hq/matrix-authentication-service/actions/workflows/build.yaml +[translation download PR]: https://github.com/element-hq/matrix-authentication-service/pulls?q=is%3Apr+label%3AA-I18n +[CI to churn]: https://github.com/element-hq/matrix-authentication-service/actions/workflows/build.yaml?query=event%3Apush+actor%3Amatrixbot +[draft release to appear]: https://github.com/element-hq/matrix-authentication-service/releases diff --git a/matrix-authentication-service/docs/reference/cli/README.md 
b/matrix-authentication-service/docs/reference/cli/README.md new file mode 100644 index 00000000..cffc0fd1 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/README.md @@ -0,0 +1,45 @@ +# Command line tool + +The command line interface provides subcommands that help run the service. + +## Logging + +The overall log level of the CLI can be changed via the `RUST_LOG` environment variable. +Default log level is `info`. +Valid levels from least to most verbose are `error`, `warn`, `info`, `debug` and `trace`. + +## Global flags + +### `--config` + +Sets the configuration file to load. +It can be repeated multiple times to merge multiple files together. + +### `--help` + +Print out help instructions. + +### `--version` + +Print the MAS CLI version. + +--- + +``` +Usage: mas-cli [OPTIONS] [COMMAND] + +Commands: + config Configuration-related commands + database Manage the database + server Runs the web server + worker Run the worker + manage Manage the instance + templates Templates-related commands + doctor Run diagnostics on the deployment + help Print this message or the help of the given subcommand(s) + +Options: + -c, --config <CONFIG> Path to the configuration file + -h, --help Print help + -V, --version Print version +``` diff --git a/matrix-authentication-service/docs/reference/cli/config.md b/matrix-authentication-service/docs/reference/cli/config.md new file mode 100644 index 00000000..c624838c --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/config.md @@ -0,0 +1,59 @@ +# `config` + +Helps to deal with the configuration + +Global options: +- `--config <config>`: Path to the configuration file. +- `--help`: Print help. + +## `config check` + +Check the validity of configuration files. + +```console +$ mas-cli config check --config=config.yaml +INFO mas_cli::config: Configuration file looks good path=["config.yaml"] +``` + +## `config dump` + +Dump the merged configuration tree.
+ +```console +$ mas-cli config dump --config=first.yaml --config=second.yaml +--- +clients: + # ... +``` + +## `config generate [--synapse-config ] [--output ]` + +Generate a sample configuration file. +It generates random signing keys (`.secrets.keys`) and the cookie encryption secret (`.secrets.encryption`). + +```console +$ mas-cli config generate > config.yaml +INFO generate: mas_config::oauth2: Generating keys... +INFO generate:rsa: mas_config::oauth2: Done generating RSA key +INFO generate:ecdsa: mas_config::oauth2: Done generating ECDSA key +``` + +The `--synapse-config` option can be used to migrate over configuration options from an existing Synapse configuration. + +The `--output` option can be used to specify the output file. If not specified, the output will be written to stdout. + +## `config sync [--prune] [--dry-run]` + +Synchronize the configuration with the database. +This will synchronize the `clients` and `upstream_oauth` sections of the configuration with the database. +By default, it does not delete clients and upstreams that are not in the configuration anymore. Use the `--prune` option to do so. +The `--dry-run` option will log the changes that would be made, without actually making them. 
+ +```console +$ mas-cli config sync --prune --config=config.yaml +INFO cli.config.sync: Syncing providers and clients defined in config to database prune=true dry_run=false +INFO cli.config.sync: Updating provider provider.id=01H3FDH2XZJS8ADKRGWM84PZTY +INFO cli.config.sync: Adding provider provider.id=01H3FDH2XZJS8ADKRGWM84PZTF +INFO cli.config.sync: Deleting client client.id=01GFWRB9MYE0QYK60NZP2YF905 +INFO cli.config.sync: Updating client client.id=01GFWRB9MYE0QYK60NZP2YF904 +``` diff --git a/matrix-authentication-service/docs/reference/cli/database.md b/matrix-authentication-service/docs/reference/cli/database.md new file mode 100644 index 00000000..59a4b962 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/database.md @@ -0,0 +1,15 @@ +# `database` + +Run database-related operations + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `database migrate` + +Run the pending database migrations. + +``` +$ mas-cli database migrate +``` \ No newline at end of file diff --git a/matrix-authentication-service/docs/reference/cli/doctor.md b/matrix-authentication-service/docs/reference/cli/doctor.md new file mode 100644 index 00000000..596db2d9 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/doctor.md @@ -0,0 +1,16 @@ +# `doctor` + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `doctor` + +Run diagnostics on the live deployment. +This tool should help diagnose common issues with the service configuration and deployment. + +When running this tool, make sure it runs from the same point-of-view as the service, with the same configuration file and environment variables. 
+ +``` +$ mas-cli doctor +``` \ No newline at end of file diff --git a/matrix-authentication-service/docs/reference/cli/manage.md b/matrix-authentication-service/docs/reference/cli/manage.md new file mode 100644 index 00000000..24f64001 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/manage.md @@ -0,0 +1,147 @@ +# `manage` + + +The MAS CLI provides several subcommands for managing users and configurations + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `manage add-email` + +Add an email address to the specified user. + +``` +$ mas-cli manage add-email +``` + +## `manage verify-email` + +[DEPRECATED] Mark an email address as verified. + +``` +$ mas-cli manage verify-email +``` + +## `manage promote-admin` + +Make a user admin. + +``` +$ mas-cli manage promote-admin +``` + +**This doesn't make all the users sessions admin, but rather lets the user request admin access in administration tools.** + +## `manage demote-admin` + +Make a user non-admin. + +``` +$ mas-cli manage demote-admin +``` + +## `manage list-admin-users` + +List all users with admin privileges. + +``` +$ mas-cli manage list-admins +``` + +## `manage set-password` + +Set a user password. + +Options: +- `--ignore-complexity`: Don't enforce that the password provided is above the minimum configured complexity. + +``` +$ mas-cli manage set-password --ignore-complexity +``` + +## `manage issue-compatibility-token` + +Issue a compatibility token for a user. + +Options: +- `--device-id `: Device ID to set in the token. If not specified, a random device ID will be generated. +- `--yes-i-want-to-grant-synapse-admin-privileges`: Whether the token should be given admin privileges. + +``` +$ mas-cli manage issue-compatibility-token --device-id --yes-i-want-to-grant-synapse-admin-privileges +``` + +## `manage issue-user-registration-token` + +Create a new user registration token. + +Options: +- `--token `: Specific token string to use. 
If not provided, a random token will be generated. +- `--usage-limit `: Limit the number of times the token can be used. If not provided, the token can be can be used only once, unless the `--unlimited` flag is set. +- `--unlimited` Allow the token to be used an unlimited number of times. +- `--expires-in `: Time in seconds after which the token expires. If not provided, the token never expires. + +``` +$ mas-cli manage issue-user-registration-token --token --usage-limit --expires-in +``` + +## `manage provision-all-users` + +Trigger a provisioning job for all users. + +``` +$ mas-cli manage provision-all-users +``` + +## `manage kill-sessions` + +Kill all sessions for a user. + +Options: +- `--dry-run`: Do a dry run, ie see which sessions would be killed. + +``` +$ mas-cli manage kill-sessions --dry-run +``` + +## `manage lock-user` + +Lock a user. + +Options: +- `--deactivate`: Whether to deactivate the user. + +``` +$ mas-cli manage lock-user --deactivate +``` + +## `manage unlock-user` + +Unlock a user. + +Options: +- `--reactivate`: Whether to reactivate the user. + +``` +$ mas-cli manage unlock-user --reactivate +``` + +## `manage register-user` + +Register a user. This will interactively prompt for the user's attributes unless the `--yes` flag is set. It bypasses any policy check on the password, email, etc. + +Options: +- `--username `: Username to register. +- `--password `: Password to set. +- `--email `: Email to add. Can be specified multiple times. +- `--upstream-provider-mapping `: Upstream OAuth 2.0 provider mapping. Can be specified multiple times. +- `--admin`: Make the user an admin. +- `--no-admin`: Make the user not an admin. +- `--yes`: Don't ask questions, just do it. +- `--display-name `: Set the user's display name. +- `--ignore-password-complexity`: Don't enforce that the password provided is above the minimum configured complexity. 
+ +``` +$ mas-cli manage register-user +``` diff --git a/matrix-authentication-service/docs/reference/cli/server.md b/matrix-authentication-service/docs/reference/cli/server.md new file mode 100644 index 00000000..b0679dc8 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/server.md @@ -0,0 +1,21 @@ +# `server` + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `server` + +Runs the authentication service. + +Options: +- `--no-migrate`: Do not apply pending database migrations on start. +- `--no-worker`: Do not start the task worker. +- `--no-sync`: Do not sync the configuration with the database. + +``` +$ mas-cli server +INFO mas_cli::server: Starting task scheduler +INFO mas_core::templates: Loading builtin templates +INFO mas_cli::server: Listening on http://0.0.0.0:8080 +``` diff --git a/matrix-authentication-service/docs/reference/cli/syn2mas.md b/matrix-authentication-service/docs/reference/cli/syn2mas.md new file mode 100644 index 00000000..0089cc70 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/syn2mas.md @@ -0,0 +1,29 @@ +# `syn2mas` + +Tool to import data from an existing Synapse homeserver into MAS. + +Global options: +- `--config `: Path to the MAS configuration file. +- `--help`: Print help. +- `--synapse-config `: Path to the Synapse configuration file. +- `--synapse-database-uri `: Override the Synapse database URI. + +## `syn2mas check` + +Check the setup for potential problems before running a migration + +```console +$ mas-cli syn2mas check --config mas_config.yaml --synapse-config homeserver.yaml +``` + +## `syn2mas migrate [--dry-run]` + +Migrate data from the homeserver to MAS. + +The `--dry-run` option will perform a dry-run of the migration, which is safe to run without stopping Synapse. +It will perform a full data migration, but then empty the MAS database at the end to roll back. 
+ + +```console +$ mas-cli syn2mas migrate --config mas_config.yaml --synapse-config homeserver.yaml +``` diff --git a/matrix-authentication-service/docs/reference/cli/templates.md b/matrix-authentication-service/docs/reference/cli/templates.md new file mode 100644 index 00000000..9877f3af --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/templates.md @@ -0,0 +1,33 @@ +# `templates` + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `templates check` + +Check the validity of the templates loaded by the config. +It compiles the templates and then renders them with different contexts. + +```console +$ mas-cli templates check +INFO mas_core::templates: Loading templates from filesystem path=./templates/**/*.{html,txt} +INFO mas_core::templates::check: Rendering template name="login.html" context={"csrf_token":"fake_csrf_token","form":{"fields_errors":{},"form_errors":[],"has_errors":false}} +INFO mas_core::templates::check: Rendering template name="register.html" context={"__UNUSED":null,"csrf_token":"fake_csrf_token"} +INFO mas_core::templates::check: Rendering template name="index.html" context={"csrf_token":"fake_csrf_token","current_session":{"active":true,"created_at":"2021-09-24T13:26:52.962135085Z","id":1,"last_authd_at":"2021-09-24T13:26:52.962135316Z","user_id":2,"username":"john"},"discovery_url":"https://example.com/.well-known/openid-configuration"} +... +``` + +Options: +- `--out-dir `: Render templates and emit them to the specified directory, which must either not exist or be empty. +- `--stabilise`: Remove sources of nondeterminism from template inputs, so that renders are reproducible. Only useful with `--out-dir`. 
+ +What is checked: +- the Jinja templates are syntactically valid +- the templates can render with a few sample values, with the branding from the MAS configuration + - undefined variables (`{{ undefined_variable }}`) will raise errors +- all translation keys exist + +What is not checked: +- the validity of the generated HTML (you can forget closing tags, or otherwise create invalid HTML output) +- all translation keys exist *in your intended language(s)* (so some translation keys may fall back to English) diff --git a/matrix-authentication-service/docs/reference/cli/worker.md b/matrix-authentication-service/docs/reference/cli/worker.md new file mode 100644 index 00000000..a24964f9 --- /dev/null +++ b/matrix-authentication-service/docs/reference/cli/worker.md @@ -0,0 +1,13 @@ +# `worker` + +Global options: +- `--config `: Path to the configuration file. +- `--help`: Print help. + +## `worker` + +Runs the authentication service worker. + +``` +$ mas-cli worker +``` diff --git a/matrix-authentication-service/docs/reference/configuration.md b/matrix-authentication-service/docs/reference/configuration.md new file mode 100644 index 00000000..d2d65a0b --- /dev/null +++ b/matrix-authentication-service/docs/reference/configuration.md @@ -0,0 +1,886 @@ +# Configuration file reference + +## `http` + +Controls the web server. + +```yaml +http: + # Public URL base used when building absolute public URLs + public_base: https://auth.example.com/ + + # OIDC issuer advertised by the service. Defaults to `public_base` + issuer: https://example.com/ + + # List of HTTP listeners, see below + listeners: + # ... +``` + +### `http.listeners` + +Each listener can serve multiple resources, and listen on multiple TCP ports or UNIX sockets. 
+ +```yaml +http: + listeners: + # The name of the listener, used in logs and metrics + - name: web + + # List of resources to serve + resources: + # Serves the .well-known/openid-configuration document + - name: discovery + # Serves the human-facing pages, such as the login page + - name: human + # Serves the OAuth 2.0/OIDC endpoints + - name: oauth + # Serves the Matrix C-S API compatibility endpoints + - name: compat + # Serve the GraphQL API used by the frontend, + # and optionally the GraphQL playground + - name: graphql + playground: true + # Serve the given folder on the /assets/ path + - name: assets + path: ./share/assets/ + # Serve the admin API on the /api/admin/v1/ path. Disabled by default + #- name: adminapi + + # List of addresses and ports to listen to + binds: + # First option: listen to the given address + - address: "[::]:8080" + + # Second option: listen on the given host and port combination + - host: localhost + port: 8081 + + # Third option: listen on the given UNIX socket + - socket: /tmp/mas.sock + + # Fourth option: grab an already open file descriptor given by the parent process + # This is useful when using systemd socket activation + - fd: 1 + # Kind of socket that was passed, defaults to tcp + kind: tcp # or unix + + # Whether to enable the PROXY protocol on the listener + proxy_protocol: false + + # If set, makes the listener use TLS with the provided certificate and key + tls: + #certificate: + certificate_file: /path/to/cert.pem + #key: + key_file: /path/to/key.pem + #password: + #password_file: /path/to/password.txt +``` + +The following additional resources are available, although it is recommended to serve them on a separate listener, not exposed to the public internet: + +- `name: prometheus`: serves a Prometheus-compatible metrics endpoint on `/metrics`, if the Prometheus exporter is enabled in `telemetry.metrics.exporter`. +- `name: health`: serves the health check endpoint on `/health`. 
+ +## `database` + +Configure how to connect to the PostgreSQL database. + +MAS must not be connected to a database pooler (such as pgBouncer or pgCat) when it is configured in transaction pooling mode. +See [the relevant section of the database page](database.md#a-warning-about-database-pooling-software) for more information. + +```yaml +database: + # Full connection string as per + # https://www.postgresql.org/docs/13/libpq-connect.html#id-1.7.3.8.3.6 + uri: postgresql://user:password@hostname:5432/database?sslmode=require + + # -- OR -- + # Separate parameters + host: hostname + port: 5432 + #socket: + username: user + password: password + database: database + + # Whether to use SSL to connect to the database + ssl_mode: require # or disable, prefer, verify-ca, verify-full + #ssl_ca: # PEM-encoded certificate + ssl_ca_file: /path/to/ca.pem # Path to the root certificate file + + # Client certificate to present to the server when SSL is enabled + #ssl_certificate: # PEM-encoded certificate + ssl_certificate_file: /path/to/cert.pem # Path to the certificate file + #ssl_key: # PEM-encoded key + ssl_key_file: /path/to/key.pem # Path to the key file + + # Additional parameters for the connection pool + min_connections: 0 + max_connections: 10 + connect_timeout: 30 + idle_timeout: 600 + max_lifetime: 1800 +``` + +## `matrix` + +Settings related to the connection to the Matrix homeserver + +```yaml +matrix: + # The homeserver name, as per the `server_name` in the Synapse configuration file + homeserver: example.com + + # Shared secret used to authenticate the service to the homeserver + # This must be of high entropy, because leaking this secret would allow anyone to perform admin actions on the homeserver + secret_file: /path/to/secret/file + # Alternatively, the shared secret can be passed inline. 
+ # secret: "SomeRandomSecret" + + # URL to which the homeserver is accessible from the service + endpoint: "http://localhost:8008" +``` + +## `templates` + +Allows loading custom templates + +```yaml +templates: + # From where to load the templates + # This is relative to the current working directory, *not* the config file + path: /to/templates + + # Path to the frontend assets manifest file + assets_manifest: /to/manifest.json + + # From where to load the translation files + # Default in Docker distribution: `/usr/local/share/mas-cli/translations/` + # Default in pre-built binaries: `./share/translations/` + # Default in locally-built binaries: `./translations/` + translations_path: /to/translations +``` + +## `clients` + +List of OAuth 2.0/OIDC clients and their keys/secrets. Each `client_id` must be a [ULID](https://github.com/ulid/spec). + +```yaml +clients: + # Confidential client + - client_id: 000000000000000000000FIRST + client_auth_method: client_secret_post + client_secret_file: secret + # OR client_secret: c1!3n753c237 + # List of authorized redirect URIs + redirect_uris: + - http://localhost:1234/callback + # Public client + - client_id: 00000000000000000000SEC0ND + client_auth_method: none +``` + +**Note:** any additions or modifications in this list are synced with the database on server startup. Removed entries are only removed with the [`config sync --prune`](../reference/cli/config.md#config-sync---prune---dry-run) command. 
+ +## `secrets` + +Signing and encryption secrets + +```yaml +secrets: + # Encryption secret (used for encrypting cookies and database fields) + # This must be a 32-byte long hex-encoded key + encryption: c7e42fb8baba8f228b2e169fdf4c8216dffd5d33ad18bafd8b928c09ca46c718 + + # Signing keys + keys: + # At least one RSA key must be configured + - key_file: keys/rsa_key + - kid: "iv1aShae" + key: | + -----BEGIN EC PRIVATE KEY----- + MHQCAQEEIE8yeUh111Npqu2e5wXxjC/GA5lbGe0j0KVXqZP12vqioAcGBSuBBAAK + oUQDQgAESKfUtKaLqCfhK+p3z870W59yOYvd+kjGWe+tK16SmWzZJbRCgdHakHE5 + MC6tJRnvedsYoKTrYoDv/XZIBI9zlA== + -----END EC PRIVATE KEY----- +``` + +### `secrets.encryption{_file}` + +The encryption secret used for encrypting cookies and database fields. It takes +the form of a 32-bytes-long hex-encoded string. To provide the encryption secret +via file, set `secrets.encryption_file` to the file path; alternatively use +`secrets.encryption` for declaring the secret inline. The options +`secrets.encryption_file` and `secrets.encryption` are mutually exclusive. + +If given via file, the encyption secret is only read at application startup. +The secret is not updated when the content of the file changes. + +> ⚠️ **Warning** – Do not change the encryption secret after the initial start! +> Changing the encryption secret afterwards will lead to a loss of all encrypted +> information in the database. + +### Signing Keys + +The service can use a number of key types for signing. 
+The following key types are supported: + +- RSA +- ECDSA with the P-256 (`prime256v1`) curve +- ECDSA with the P-384 (`secp384r1`) curve +- ECDSA with the K-256 (`secp256k1`) curve + +The following key formats are supported: + +- PKCS#1 PEM or DER-encoded RSA private key +- PKCS#8 PEM or DER-encoded RSA or ECDSA private key, encrypted or not +- SEC1 PEM or DER-encoded ECDSA private key + +The signing keys are used for: +- signing ID Tokens (as returned in the [Token Endpoint] at `/oauth2/token`); +- signing the response of the [UserInfo Endpoint] at `/oauth2/userinfo` if the + client requests a signed response; +- (niche) signing a JWT for authenticating to an upstream OAuth provider when + the `private_key_jwt` client auth method is configured. + +At a minimum, an RSA key must be configured in order to be compliant with the +[OpenID Connect Core specification][oidc-core-rs256] which specifies the RS256 algorithm +as mandatory to implement by servers for interoperability reasons. + +The keys can be given as a directory path via `secrets.keys_dir` +or, alternatively, as an inline configuration list via `secrets.keys`. + +[Token Endpoint]: https://openid.net/specs/openid-connect-core-1_0.html#TokenEndpoint +[UserInfo Endpoint]: https://openid.net/specs/openid-connect-core-1_0.html#UserInfo +[oidc-core-rs256]: https://openid.net/specs/openid-connect-core-1_0.html#ServerMTI + +#### `secrets.keys_dir` + +Path to the directory containing MAS signing key files. +Only keys that don’t require a password are supported. + +#### `secrets.keys` + +Each entry in the list corresponds to one signing key used by MAS. +The key can either be specified inline (with the `key` property), +or loaded from a file (with the `key_file` property). + +A [JWK Key ID] is automatically derived from each key. +To override this default, set `kid` to a custom value. 
+The `kid` can be any case-sensitive string value as long as it is unique to this list; +a key’s `kid` value must be stable across restarts. + +For PKCS#8 encoded keys, the `password` or `password_file` properties can be used to decrypt the key. + +[JWK Key ID]: + +## `passwords` + +Settings related to the local password database + +```yaml +passwords: + # Whether to enable the password database. + # If disabled, users will only be able to log in using upstream OIDC providers + enabled: true + + # Minimum complexity required for passwords, estimated by the zxcvbn algorithm + # Must be between 0 and 4, default is 3 + # See https://github.com/dropbox/zxcvbn#usage for more information + minimum_complexity: 3 + + # List of password hashing schemes being used + # /!\ Only change this if you know what you're doing + # TODO: document this section better + schemes: + - version: 1 + algorithm: argon2id +``` + +## `account` + +Configuration related to account management + +```yaml +account: + # Whether users are allowed to change their email addresses. + # + # Defaults to `true`. + email_change_allowed: true + + # Whether users are allowed to change their display names + # + # Defaults to `true`. + # This should be in sync with the policy in the homeserver configuration. + displayname_change_allowed: true + + # Whether to enable self-service password registration + # + # Defaults to `false`. + # This has no effect if password login is disabled. + password_registration_enabled: false + + # Whether self-service registrations require a valid email + # + # Defaults to `true` + # This has no effect if password registration is disabled. + password_registration_email_required: true + + # Whether users are allowed to change their passwords + # + # Defaults to `true`. + # This has no effect if password login is disabled. + password_change_allowed: true + + # Whether email-based password recovery is enabled + # + # Defaults to `false`. 
+ # This has no effect if password login is disabled. + password_recovery_enabled: false + + # Whether users are allowed to delete their own account + # + # Defaults to `true`. + account_deactivation_allowed: true + + # Whether users can log in with their email address. + # + # Defaults to `false`. + # This has no effect if password login is disabled. + login_with_email_allowed: false + + # Whether registration tokens are required for password registrations. + # + # Defaults to `false`. + # + # When enabled, users must provide a valid registration token during password + # registration. This has no effect if password registration is disabled. + registration_token_required: false +``` + +## `captcha` + +Settings related to CAPTCHA protection + +```yaml +captcha: + # Which service to use for CAPTCHA protection. Set to `null` (or `~`) to disable CAPTCHA protection + service: ~ + + # Use Google reCAPTCHA v2 + #service: recaptcha_v2 + #site_key: "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI" + #secret_key: "6LeIxAcTAAAAAGG"-vFI1TnRWxMZNFuojJ4WifJWe + + # Use Cloudflare Turnstile + #service: cloudflare_turnstile + #site_key: "1x00000000000000000000AA" + #secret_key: "1x0000000000000000000000000000000AA" + + # Use hCaptcha + #service: hcaptcha + #site_key: "10000000-ffff-ffff-ffff-000000000001" + #secret_key: "0x0000000000000000000000000000000000000000" +``` + + +## `policy` + +Policy settings + +```yaml +policy: + # Path to the WASM module + # Default in Docker distribution: `/usr/local/share/mas-cli/policy.wasm` + # Default in pre-built binaries: `./share/policy.wasm` + # Default in locally-built binaries: `./policies/policy.wasm` + wasm_module: ./policies/policy.wasm + # Entrypoint to use when evaluating client registrations + client_registration_entrypoint: client_registration/violation + # Entrypoint to use when evaluating user registrations + register_entrypoint: register/violation + # Entrypoint to use when evaluating authorization grants + 
authorization_grant_entrypoint: authorization_grant/violation + # Entrypoint to use when changing password + password_entrypoint: password/violation + # Entrypoint to use when adding an email address + email_entrypoint: email/violation + + # This data is being passed to the policy + data: + # Users which are allowed to ask for admin access. If possible, use the + # can_request_admin flag on users instead. + admin_users: + - person1 + - person2 + + # Client IDs which are allowed to ask for admin access with a + # client_credentials grant + admin_clients: + - 01H8PKNWKKRPCBW4YGH1RWV279 + - 01HWQCPA5KF10FNCETY9402WGF + + # Dynamic Client Registration + client_registration: + # don't require URIs to be on the same host. default: false + allow_host_mismatch: false + # allow non-SSL and localhost URIs. default: false + allow_insecure_uris: false + # don't require clients to provide a client_uri. default: false + allow_missing_client_uri: false + + # Restrictions on user registration + registration: + # If specified, the username (localpart) *must* match one of the allowed + # usernames. If unspecified, all usernames are allowed. + allowed_usernames: + # Exact usernames that are allowed + literals: ["alice", "bob"] + # Substrings that match allowed usernames + substrings: ["user"] + # Regular expressions that match allowed usernames + regexes: ["^[a-z]+$"] + # Prefixes that match allowed usernames + prefixes: ["user-"] + # Suffixes that match allowed usernames + suffixes: ["-corp"] + # If specified, the username (localpart) *must not* match one of the + # banned usernames. If unspecified, all usernames are allowed. 
+ banned_usernames: + # Exact usernames that are banned + literals: ["admin", "root"] + # Substrings that match banned usernames + substrings: ["admin", "root"] + # Regular expressions that match banned usernames + regexes: ["^admin$", "^root$"] + # Prefixes that match banned usernames + prefixes: ["admin-", "root-"] + # Suffixes that match banned usernames + suffixes: ["-admin", "-root"] + + # Restrict what email addresses can be added to a user + emails: + # If specified, the email address *must* match one of the allowed addresses. + # If unspecified, all email addresses are allowed. + allowed_addresses: + # Exact emails that are allowed + literals: ["alice@example.com", "bob@example.com"] + # Regular expressions that match allowed emails + regexes: ["@example\\.com$"] + # Suffixes that match allowed emails + suffixes: ["@example.com"] + + # If specified, the email address *must not* match one of the banned addresses. + # If unspecified, all email addresses are allowed. + banned_addresses: + # Exact emails that are banned + literals: ["alice@evil.corp", "bob@evil.corp"] + # Emails that contains those substrings are banned + substrings: ["evil"] + # Regular expressions that match banned emails + regexes: ["@evil\\.corp$"] + # Suffixes that match banned emails + suffixes: ["@evil.corp"] + # Prefixes that match banned emails + prefixes: ["alice@"] + + requester: + # List of IP addresses and CIDRs that are not allowed to register + banned_ips: + - 192.168.0.1 + - 192.168.1.0/24 + - fe80::/64 + + # User agent patterns that are not allowed to register + banned_user_agents: + literals: ["Pretend this is Real;"] + substrings: ["Chrome"] + regexes: ["Chrome 1.*;"] + prefixes: ["Mozilla/"] + suffixes: ["Safari/605.1.15"] +``` + +## `rate_limiting` + +Settings for limiting the rate of user actions to prevent abuse. + +Each rate limiter consists of two options: +- `burst`: a base amount of how many actions are allowed in one go. 
+- `per_second`: how many units of the allowance replenish per second. + +```yaml +rate_limiting: + # Limits how many account recovery attempts are allowed. + # These limits can protect against e-mail spam. + # + # Note: these limit also apply to recovery e-mail re-sends. + account_recovery: + # Controls how many account recovery attempts are permitted + # based on source IP address. + per_ip: + burst: 3 + per_second: 0.0008 + + # Controls how many account recovery attempts are permitted + # based on the e-mail address that is being used for recovery. + per_address: + burst: 3 + per_second: 0.0002 + + # Limits how many login attempts are allowed. + # + # Note: these limit also applies to password checks when a user attempts to + # change their own password. + login: + # Controls how many login attempts are permitted + # based on source IP address. + # This can protect against brute force login attempts. + per_ip: + burst: 3 + per_second: 0.05 + + # Controls how many login attempts are permitted + # based on the account that is being attempted to be logged into. + # This can protect against a distributed brute force attack + # but should be set high enough to prevent someone's account being + # casually locked out. + per_account: + burst: 1800 + per_second: 0.5 + + # Limits how many registrations attempts are allowed, + # based on source IP address. + # This limit can protect against e-mail spam and against people registering too many accounts. 
+ registration: + burst: 3 + per_second: 0.0008 +``` + +## `telemetry` + +Settings related to metrics and traces + +```yaml +telemetry: + tracing: + # List of propagators to use for extracting and injecting trace contexts + propagators: + # Propagate according to the W3C Trace Context specification + - tracecontext + # Propagate according to the W3C Baggage specification + - baggage + # Propagate trace context with Jaeger compatible headers + - jaeger + + # The default: don't export traces + exporter: none + + # Export traces to an OTLP-compatible endpoint + #exporter: otlp + #endpoint: https://localhost:4318 + + metrics: + # The default: don't export metrics + exporter: none + + # Export metrics to an OTLP-compatible endpoint + #exporter: otlp + #endpoint: https://localhost:4317 + + # Export metrics by exposing a Prometheus endpoint + # This requires mounting the `prometheus` resource to an HTTP listener + #exporter: prometheus + + sentry: + # DSN to use for sending errors and crashes to Sentry + dsn: https://public@host:port/1 +``` + +## `email` + +Settings related to sending emails + +```yaml +email: + from: '"The almighty auth service" ' + reply_to: '"No reply" ' + + # Default transport: don't send any emails + transport: blackhole + + # Send emails using SMTP + #transport: smtp + #mode: plain | tls | starttls + #hostname: localhost + #port: 587 + #username: username + #password: password + + # Send emails by calling a local sendmail binary + #transport: sendmail + #command: /usr/sbin/sendmail +``` + +## `upstream_oauth2` + +Settings related to upstream OAuth 2.0/OIDC providers. +Additions and modifications within this section are synced with the database on server startup. +Removed entries are only removed with the [`config sync --prune`](./cli/config.md#config-sync---prune---dry-run) command. + +### `upstream_oauth2.providers` + +A list of upstream OAuth 2.0/OIDC providers to use to authenticate users. 
+ +Sample configurations for popular providers can be found in the [upstream provider setup](../setup/sso.md#sample-configurations) guide. + +```yaml +upstream_oauth2: + providers: + - # A unique identifier for the provider + # Must be a valid ULID + id: 01HFVBY12TMNTYTBV8W921M5FA + + # The issuer URL, which will be used to discover the provider's configuration. + # If discovery is enabled, this *must* exactly match the `issuer` field + # advertised in `/.well-known/openid-configuration`. + # It must be set if OIDC discovery is enabled (which is the default). + #issuer: https://example.com/ + + # A human-readable name for the provider, + # which will be displayed on the login page + #human_name: Example + + # A brand identifier for the provider, which will be used to display a logo + # on the login page. Values supported by the default template are: + # - `apple` + # - `google` + # - `facebook` + # - `github` + # - `gitlab` + # - `twitter` + #brand_name: google + + # The client ID to use to authenticate to the provider + client_id: mas-fb3f0c09c4c23de4 + + # The client secret to use to authenticate to the provider + # This is only used by the `client_secret_post`, `client_secret_basic` + # and `client_secret_jwk` authentication methods + client_secret_file: secret + # OR client_secret: f4f6bb68a0269264877e9cb23b1856ab + + # Which authentication method to use to authenticate to the provider + # Supported methods are: + # - `none` + # - `client_secret_basic` + # - `client_secret_post` + # - `client_secret_jwt` + # - `private_key_jwt` (using the keys defined in the `secrets.keys` section) + # - `sign_in_with_apple` (a special authentication method for Sign-in with Apple) + token_endpoint_auth_method: client_secret_post + + # Additional paramaters for the `sign_in_with_apple` authentication method + # See https://www.oauth.com/oauth2-servers/pkce/authorization-code-flow-with-pkce/ + #sign_in_with_apple: + # private_key: | + # -----BEGIN PRIVATE KEY----- + # ... 
+ # -----END PRIVATE KEY----- + # team_id: "" + # key_id: "" + + # Which signing algorithm to use to sign the authentication request when using + # the `private_key_jwt` or the `client_secret_jwt` authentication methods + #token_endpoint_auth_signing_alg: RS256 + + # The scopes to request from the provider + # In most cases, it should always include `openid` scope + scope: "openid email profile" + + # How the provider configuration and endpoints should be discovered + # Possible values are: + # - `oidc`: discover the provider through OIDC discovery, + # with strict metadata validation (default) + # - `insecure`: discover through OIDC discovery, but skip metadata validation + # - `disabled`: don't discover the provider and use the endpoints below + #discovery_mode: oidc + + # Whether PKCE should be used during the authorization code flow. + # Possible values are: + # - `auto`: use PKCE if the provider supports it (default) + # Determined through discovery, and disabled if discovery is disabled + # - `always`: always use PKCE (with the S256 method) + # - `never`: never use PKCE + #pkce_method: auto + + # Whether to fetch user claims from the userinfo endpoint + # This is disabled by default, as most providers will return the necessary + # claims in the `id_token` + #fetch_userinfo: true + + # If set, ask for a signed response on the userinfo endpoint, and validate + # the response uses the given algorithm + #userinfo_endpoint_auth_signing_alg: RS256 + + # The userinfo endpoint + # This takes precedence over the discovery mechanism + #userinfo_endpoint: https://example.com/oauth2/userinfo + + # The provider authorization endpoint + # This takes precedence over the discovery mechanism + #authorization_endpoint: https://example.com/oauth2/authorize + + # The provider token endpoint + # This takes precedence over the discovery mechanism + #token_endpoint: https://example.com/oauth2/token + + # The provider JWKS URI + # This takes precedence over the discovery mechanism + 
#jwks_uri: https://example.com/oauth2/keys + + # The response mode we ask the provider to use for the callback + # Possible values are: + # - `query`: The provider will send the response as a query string in the + # URL search parameters. This is the default. + # - `form_post`: The provider will send the response as a POST request with + # the response parameters in the request body + #response_mode: query + + # Additional parameters to include in the authorization request + #additional_authorization_parameters: + # foo: "bar" + + # Whether the `login_hint` should be forwarded to the provider in the + # authorization request. + #forward_login_hint: false + + # What to do when receiving an OIDC Backchannel logout request. + # Possible values are: + # - `do_nothing` (default): do nothing, other than validating and logging the request + # - `logout_browser_only`: Only log out the MAS 'browser session' started by this OIDC session + # - `logout_all`: Log out all sessions started by this OIDC session, including MAS 'browser sessions' and client sessions + #on_backchannel_logout: do_nothing + + # How user attributes should be mapped + # + # Most of those attributes have two main properties: + # - `action`: what to do with the attribute. Possible values are: + # - `ignore`: ignore the attribute + # - `suggest`: suggest the attribute to the user, but let them opt out + # - `force`: always import the attribute, and don't fail if it's missing + # - `require`: always import the attribute, and fail if it's missing + # - `template`: a Jinja2 template used to generate the value. In this template, + # the `user` variable is available, which contains the user's attributes + # retrieved from the `id_token` given by the upstream provider and/or through + # the userinfo endpoint. + # + # Each attribute has a default template which follows the well-known OIDC claims. 
+ # + claims_imports: + # The subject is an internal identifier used to link the + # user's provider identity to local accounts. + # By default it uses the `sub` claim as per the OIDC spec, + # which should fit most use cases. + subject: + #template: "{{ user.sub }}" + + # By default, new users will see a screen confirming the attributes they + # are about to have on their account. + # + # Setting this to `true` allows skipping this screen, but requires the + # `localpart.action` to be set to `require` and the other attributes + # actions to be set to `ignore`, `force` or `require`. + #skip_confirmation: false + + # The localpart is the local part of the user's Matrix ID. + # For example, on the `example.com` server, if the localpart is `alice`, + # the user's Matrix ID will be `@alice:example.com`. + localpart: + #action: force + #template: "{{ user.preferred_username }}" + + # How to handle when localpart already exists. + # Possible values are (default: fail): + # - `fail` : Fails the upstream OAuth 2.0 login. + # - `add` : Adds the upstream account link to the existing user, regardless of whether there is an existing link or not. + # - `replace` : Replace any existing upstream OAuth 2.0 identity link for this provider on the matching user. + # - `set` : Adds the upstream account link *only* if there is no existing link for this provider on the matching user. + #on_conflict: fail + + # The display name is the user's display name. + displayname: + #action: suggest + #template: "{{ user.name }}" + + # An email address to import. + email: + #action: suggest + #template: "{{ user.email }}" + + # An account name, for display purposes only + # This helps end user identify what account they are using + account_name: + #template: "@{{ user.preferred_username }}" +``` + +## `branding` + +Configuration section for tweaking the branding of the service. + +```yaml +branding: + # A human-readable name. Defaults to the server's address. 
+  #service_name:
+
+  # Link to a privacy policy, displayed in the footer of web pages and
+  # emails. It is also advertised to clients through the `op_policy_uri`
+  # OIDC provider metadata.
+  #policy_uri:
+
+  # Link to a terms of service document, displayed in the footer of web
+  # pages and emails. It is also advertised to clients through the
+  # `op_tos_uri` OIDC provider metadata.
+  #
+  # This also adds a mandatory checkbox during registration. The value of
+  # this config item will be stored in the `user_terms` table to indicate
+  # which ToS document the user accepted. Note that currently changing this
+  # value will not force existing users to re-accept terms.
+  #tos_uri:
+
+  # Legal imprint, displayed in the footer of web pages and emails.
+  #imprint:
+
+  # Logo displayed in some web pages.
+  #logo_uri:
+```
+
+## `experimental`
+
+Settings that may change or be removed in future versions.
+Some of these are in this section because they don't have a stable place in the configuration yet.
+
+```yaml
+experimental:
+  # Time-to-live of OAuth 2.0 access tokens in seconds. Defaults to 300, 5 minutes.
+  #access_token_ttl: 300
+
+  # Time-to-live of compatibility access tokens in seconds, when refresh tokens are supported. Defaults to 300, 5 minutes.
+  #compat_token_ttl: 300
+
+  # Experimental feature to automatically expire inactive sessions
+  # Disabled by default
+  #inactive_session_expiration:
+    # Time after which an inactive session is automatically finished in seconds
+    #ttl: 32400
+
+    # Should compatibility sessions expire after inactivity. Defaults to true.
+    #expire_compat_sessions: true
+
+    # Should OAuth 2.0 sessions expire after inactivity. Defaults to true.
+    #expire_oauth_sessions: true
+
+    # Should user sessions expire after inactivity. Defaults to true.
+ #expire_user_sessions: true +``` diff --git a/matrix-authentication-service/docs/reference/scopes.md b/matrix-authentication-service/docs/reference/scopes.md new file mode 100644 index 00000000..fcb76d35 --- /dev/null +++ b/matrix-authentication-service/docs/reference/scopes.md @@ -0,0 +1,101 @@ +# OAuth 2.0 scopes + +The [default policy](../topics/policy.md#authorization-requests) shipped with MAS supports the following scopes: + + - [`openid`](#openid) + - [`email`](#email) + - [`urn:matrix:client:api:*`](#urnmatrixclientapi) + - [`urn:matrix:client:device:[device id]`](#urnmatrixclientdevicedevice-id) + - [`urn:synapse:admin:*`](#urnsynapseadmin) + - [`urn:mas:admin`](#urnmasadmin) + - [`urn:mas:graphql:*`](#urnmasgraphql) + +## OpenID Connect scopes + +MAS supports the following standard OpenID Connect scopes, as defined in [OpenID Connect Core 1.0]: + +### `openid` + +The `openid` scope is a special scope that indicates that the client is requesting an OpenID Connect `id_token`. +The userinfo endpoint as described by the same specification requires this scope to be present in the request. + +The default policy allows any client and any user to request this scope. + +### `email` + +Requires the `openid` scope to be present in the request. +It adds the user's email address to the `id_token` and to the claims returned by the userinfo endpoint. + +The default policy allows any client and any user to request this scope. + +## Matrix-related scopes + +Those scopes are specific to the Matrix protocol and are part of [MSC2967]. + +### `urn:matrix:client:api:*` + +This scope grants access to the full Matrix client-server API. + +The default policy allows any client and any user to request this scope. + +### `urn:matrix:client:device:[device id]` + +This scope sets the device ID of the session, where `[device id]` is the device ID of the session. +Currently, MAS only allows the following characters in the device ID: `a-z`, `A-Z`, `0-9` and `-`. 
+It also needs to be at least 10 characters long.
+
+There can only be one device ID in the scope list of a session.
+
+The default policy allows any client and any user to request this scope.
+
+## Synapse-specific scopes
+
+MAS also supports one Synapse-specific scope, which isn't formally defined in any specification.
+
+### `urn:synapse:admin:*`
+
+This scope grants access to the [Synapse admin API].
+
+Because of how Synapse works for now, this scope by itself isn't sufficient to access the admin API.
+A session wanting to access the admin API also needs to have the `urn:matrix:client:api:*` scope.
+
+The default policy doesn't allow everyone to request this scope.
+It allows:
+
+- users with the `can_request_admin` attribute set to `true` in the database
+- users listed in the [`policy.data.admin_users`](../reference/configuration.md#policy) configuration option
+
+## MAS-specific scopes
+
+MAS also has a few scopes that are specific to the MAS implementation.
+
+### `urn:mas:admin`
+
+This scope grants full access to the MAS [Admin API].
+
+The default policy doesn't allow everyone to request this scope.
+It allows:
+
+- for the "[authorization code]" and "[device authorization]" grants:
+  - users with the `can_request_admin` attribute set to `true` in the database
+  - users listed in the [`policy.data.admin_users`](../reference/configuration.md#policy) configuration option
+- for the "client credentials" grant:
+  - clients that are listed in the [`policy.data.admin_clients`](../reference/configuration.md#policy) configuration option
+
+### `urn:mas:graphql:*`
+
+This scope grants access to the whole MAS [Internal GraphQL API].
+What permission the session has on the API is determined by the entity that the session is authorized as.
+When [authorized as a user](../topics/authorization.md#authorized-as-a-user-or-authorized-as-a-client) (and without the `urn:mas:admin` scope), this will usually allow querying and mutating the user's own data.
+ +The default policy allows any client and any user to request this scope. + +However, as noted in the [Internal GraphQL API] documentation, access to the Internal GraphQL API from outside of MAS itself is deprecated in favour of the [Admin API]. + +[authorization code]: ../topics/authorization.md#authorization-code-grant +[device authorization]: ../topics/authorization.md#device-authorization-grant +[Internal GraphQL API]: ../development/graphql.md +[Admin API]: ../topics/admin-api.md +[Synapse admin API]: https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html +[OpenID Connect Core 1.0]: https://openid.net/specs/openid-connect-core-1_0.html +[MSC2967]: https://github.com/matrix-org/matrix-spec-proposals/pull/2967 diff --git a/matrix-authentication-service/docs/rustdoc/mas_handlers/README.md b/matrix-authentication-service/docs/rustdoc/mas_handlers/README.md new file mode 100644 index 00000000..e65f319e --- /dev/null +++ b/matrix-authentication-service/docs/rustdoc/mas_handlers/README.md @@ -0,0 +1,2 @@ +This is a placeholder which is replaced by the built crates technical documentation when building the documentation. +If you're seeing this, you're probably looking at the documentation source, and should look at the built documentation instead here: diff --git a/matrix-authentication-service/docs/setup/README.md b/matrix-authentication-service/docs/setup/README.md new file mode 100644 index 00000000..54b8ae8b --- /dev/null +++ b/matrix-authentication-service/docs/setup/README.md @@ -0,0 +1,21 @@ +# Planning the installation + +This part of the documentation goes through installing the service, the important parts of the configuration file, and how to run the service. + +Before going through the installation, it is important to understand the different components of an OIDC-native Matrix homeserver, and how they interact with each other. 
+The authentication service is meant to complement the homeserver, replacing the internal authentication mechanism with the authentication service.
+
+Making a homeserver deployment OIDC-native radically shifts the authentication model: the homeserver is no longer responsible for managing user accounts and sessions.
+The authentication service becomes the source of truth for user accounts and access tokens, and the homeserver only verifies the validity of the tokens it receives through the service.
+
+At the time of writing, the authentication service is meant to be run on a standalone domain name (e.g. `auth.example.com`), and the homeserver on another (e.g. `matrix.example.com`).
+The authentication service's domain will be user-facing as part of the authentication flow.
+
+An example setup could look like this:
+
+ - The deployment domain is `example.com`, so Matrix IDs look like `@user:example.com`
+ - The authentication service is deployed on `auth.example.com`
+ - The homeserver is deployed on `matrix.example.com`
+
+With the installation planned, it is time to go through the installation and configuration process.
+The first section focuses on [installing the service](./installation.md).
diff --git a/matrix-authentication-service/docs/setup/database.md b/matrix-authentication-service/docs/setup/database.md
new file mode 100644
index 00000000..7ac5e4c7
--- /dev/null
+++ b/matrix-authentication-service/docs/setup/database.md
@@ -0,0 +1,75 @@
+# Database configuration
+
+The service uses a [PostgreSQL](https://www.postgresql.org/) database to store all of its state.
+Although it may be possible to run with earlier versions, it is recommended to use **PostgreSQL 13** or later.
+Connection to the database is configured in the [`database`](../reference/configuration.md#database) section of the configuration file.
+
+## A warning about database pooling software
+
+MAS must not be connected to a database pooler (such as pgBouncer or pgCat) when it is configured in transaction pooling mode.
+This is because MAS uses advisory locks, which are not compatible with transaction pooling. + +You should instead configure such poolers in session pooling mode. + +## Set up a database + +You will need to create a dedicated PostgreSQL database for the service. +The database can run on the same server as the service, or on a dedicated host. +The recommended setup for this database is to create a dedicated role and database for the service. + +Assuming your PostgreSQL database user is called `postgres`, first authenticate as the database user with: + +```sh +su - postgres +# Or, if your system uses sudo to get administrative rights +sudo -u postgres bash +``` + +Then, create a postgres user and a database with: + +```sh +# this will prompt for a password for the new user +createuser --pwprompt mas_user +createdb --owner=mas_user mas +``` + +The above will create a user called `mas_user` with a password of your choice, and a database called `mas` owned by the `mas_user` user. + +## Service configuration + +Once the database is created, the service needs to be configured to connect to it. +Edit the [`database`](../reference/configuration.md#database) section of the configuration file to match the database just created: + +```yaml +database: + # Full connection string as per + # https://www.postgresql.org/docs/13/libpq-connect.html#id-1.7.3.8.3.6 + uri: postgres://:@/ + + # -- OR -- + # Separate parameters + host: + port: 5432 + username: + password: + database: +``` + +## Database migrations + +The service manages the database schema with embedded migrations. +Those migrations are run automatically when the service starts, but it is also possible to run them manually. 
+This is done using the [`database migrate`](../reference/cli/database.md#database-migrate) command:
+
+```sh
+mas-cli database migrate
+```
+
+## Next steps
+
+Once the database is up, the remaining steps are to:
+
+ - [Set up the connection to the homeserver (recommended)](./homeserver.md)
+ - [Set up email sending (optional)](../reference/configuration.md#email)
+ - [Configure a reverse proxy (optional)](./reverse-proxy.md)
+ - [Run the service](./running.md)
diff --git a/matrix-authentication-service/docs/setup/general.md b/matrix-authentication-service/docs/setup/general.md
new file mode 100644
index 00000000..26e563c8
--- /dev/null
+++ b/matrix-authentication-service/docs/setup/general.md
@@ -0,0 +1,76 @@
+# General configuration
+
+## Initial configuration generation
+
+The service needs a few unique secrets and keys to work.
+It mainly includes:
+
+ - the various signing keys referenced in the [`secrets.keys`](../reference/configuration.md#secrets) section
+ - the encryption key used to encrypt fields in the database and cookies, set in the [`secrets.encryption`](../reference/configuration.md#secrets) section
+ - a shared secret between the service and the homeserver, set in the [`matrix.secret`](../reference/configuration.md#matrix) section
+
+Although it is possible to generate these secrets manually, it is strongly recommended to use the [`config generate`](../reference/cli/config.md#config-generate) command to generate a configuration file with unique secrets and keys.
+
+```sh
+mas-cli config generate > config.yaml
+```
+
+If you're using the docker container, the command `mas-cli` can be invoked with `docker run`:
+
+```sh
+docker run ghcr.io/element-hq/matrix-authentication-service config generate > config.yaml
+```
+
+This applies to all of the `mas-cli` commands in this document.
+
+**Note:** The generated configuration file is very extensive, and contains the default values for all the configuration options.
+This will be made easier to read in the future, but in the meantime, it is recommended to strip untouched options from the configuration file. + +## Using and inspecting the configuration file + +When using the `mas-cli`, multiple configuration files can be loaded, with the following rule: + +1. If the `--config` option is specified, possibly multiple times, load the file at the specified path, relative to the current working directory +2. If not, load the files specified in the `MAS_CONFIG` environment variable if set, separated by `:`, relative to the current working directory +3. If not, load the file at `config.yaml` in the current working directory + +The validity of the configuration file can be checked using the [`config check`](../reference/cli/config.md#config-check) command: + +```sh +# This will read both the `first.yaml` and `second.yaml` files +mas-cli config check --config=first.yaml --config=second.yaml + +# This will also read both the `first.yaml` and `second.yaml` files +MAS_CONFIG=first.yaml:second.yaml mas-cli config check + +# This will only read the `config.yaml` file +mas-cli config check +``` + +To help understand what the resulting configuration looks like after merging all the configuration files, the [`config dump`](../reference/cli/config.md#config-dump) command can be used: + +```sh +mas-cli config dump +``` + +## Configuration schema + +The configuration file is validated against a JSON schema, which can be found [here](../config.schema.json). +Many [tools in text editors](https://json-schema.org/implementations.html#editors) can use this schema to provide autocompletion and validation. + +## Syncing the configuration file with the database + +Some sections of the configuration file need to be synced every time the configuration file is updated. +This includes the [`clients`](../reference/configuration.md#clients) and [`upstream_oauth`](../reference/configuration.md#upstream-oauth) sections. 
+The configuration is synced by default on startup, and can be manually synced using the [`config sync`](../reference/cli/config.md#config-sync---prune---dry-run) command. + +By default, this will only add new clients and upstream OAuth providers and update existing ones, but will not remove entries that were removed from the configuration file. +To do so, use the `--prune` option: + +```sh +mas-cli config sync --prune +``` + +## Next step + +After generating the configuration file, the next step is to [set up a database](./database.md). diff --git a/matrix-authentication-service/docs/setup/homeserver.md b/matrix-authentication-service/docs/setup/homeserver.md new file mode 100644 index 00000000..ead3f8a1 --- /dev/null +++ b/matrix-authentication-service/docs/setup/homeserver.md @@ -0,0 +1,69 @@ +# Homeserver configuration + +The `matrix-authentication-service` is designed to be run alongside a Matrix homeserver. +It currently only supports [Synapse](https://github.com/element-hq/synapse) version 1.136.0 or later. +The authentication service needs to be able to call the Synapse admin API to provision users through a shared secret, and Synapse needs to be able to call the service to verify access tokens using the OAuth 2.0 token introspection endpoint. 
+ +## Configure the connection to the homeserver + +In the [`matrix`](../reference/configuration.md#matrix) section of the configuration file, add the following properties: + + - `kind`: the type of homeserver to connect to, currently only `synapse` is supported + - `homeserver`: corresponds to the `server_name` in the Synapse configuration file + - `secret`: a shared secret the service will use to call the homeserver MAS API + - `endpoint`: the URL to which the homeserver is accessible from the service + +```yaml +matrix: + kind: synapse + homeserver: example.com + endpoint: "http://localhost:8008" + secret: "AVeryRandomSecretPleaseUseSomethingSecure" + # Alternatively, using a file: + #secret_path: /path/to/secret.txt +``` + +## Configure the homeserver to delegate authentication to the service + +Set up the delegated authentication feature **in the Synapse configuration** in the `matrix_authentication_service` section: + +```yaml +matrix_authentication_service: + enabled: true + endpoint: http://localhost:8080/ + secret: "AVeryRandomSecretPleaseUseSomethingSecure" + # Alternatively, using a file: + #secret_file: /path/to/secret.txt +``` + +The `endpoint` property should be set to the URL of the authentication service. +This can be an internal URL, to avoid unnecessary round-trips. + +The `secret` property must match in both the Synapse configuration and the Matrix Authentication Service configuration. + +## Set up the compatibility layer + +The service exposes a compatibility layer to allow legacy clients to authenticate using the service. +This works by exposing a few Matrix endpoints that should be proxied to the service. 
+ +The following Matrix Client-Server API endpoints need to be handled by the authentication service: + + - [`/_matrix/client/*/login`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login) + - [`/_matrix/client/*/logout`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3logout) + - [`/_matrix/client/*/refresh`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3refresh) + +See the [reverse proxy configuration](./reverse-proxy.md) guide for more information. + + +## Migrating from the experimental MSC3861 feature + +If you are migrating from the experimental MSC3861 feature in Synapse, you will need to migrate the `experimental_features.msc3861` section of the Synapse configuration to the `matrix_authentication_service` section. + +To do so, you need to: + + - Remove the `experimental_features.msc3861` section from the Synapse configuration + - Add the `matrix_authentication_service` section to the Synapse configuration with: + - `enabled: true` + - `endpoint` set to the URL of the authentication service + - `secret` set to the same secret as the `admin_token` that was set in the `msc3861` section + - Optionally, remove the client provisioned for Synapse in the `clients` section of the MAS configuration diff --git a/matrix-authentication-service/docs/setup/installation.md b/matrix-authentication-service/docs/setup/installation.md new file mode 100644 index 00000000..acc45519 --- /dev/null +++ b/matrix-authentication-service/docs/setup/installation.md @@ -0,0 +1,112 @@ +# Installation + +## Pre-built binaries + +Pre-built binaries can be found attached on each release, for Linux on both `x86_64` and `aarch64` architectures. 
+ +- [`mas-cli-aarch64-linux.tar.gz`](https://github.com/element-hq/matrix-authentication-service/releases/latest/download/mas-cli-aarch64-linux.tar.gz) +- [`mas-cli-x86_64-linux.tar.gz`](https://github.com/element-hq/matrix-authentication-service/releases/latest/download/mas-cli-x86_64-linux.tar.gz) + +Each archive contains: + +- the `mas-cli` binary +- assets needed for running the service, including: + - `share/assets/`: the built frontend assets + - `share/manifest.json`: the manifest for the frontend assets + - `share/policy.wasm`: the built OPA policies + - `share/templates/`: the default templates + - `share/translations/`: the default translations + +The location of all these assets can be overridden in the [configuration file](./configuration.md). + +--- + +Example shell commands to download and extract the `mas-cli` binary: + +```sh +ARCH=x86_64 # or aarch64 +OS=linux +VERSION=latest # or a specific version, like "v0.1.0" + +# URL to the right archive +URL="https://github.com/element-hq/matrix-authentication-service/releases/${VERSION}/download/mas-cli-${ARCH}-${OS}.tar.gz" + +# Create a directory and extract the archive in it +mkdir -p /path/to/mas +curl -sL "$URL" | tar xzC /path/to/mas + +# This should display the help message +/path/to/mas/mas-cli --help +``` + + +## Using the Docker image + +A pre-built Docker image is available here: [`ghcr.io/element-hq/matrix-authentication-service:latest`](https://ghcr.io/element-hq/matrix-authentication-service:latest) + +The `latest` tag is built using the latest release. +The `main` tag is built from the `main` branch, and each commit on the `main` branch is also tagged with a stable `sha-` tag. + +The image can also be built from the source: + +1. Get the source + ```sh + git clone https://github.com/element-hq/matrix-authentication-service.git + cd matrix-authentication-service + ``` +1. Build the image + ```sh + docker build -t mas . 
+ ``` + +## Building from the source + +Building from the source requires: + +- The latest stable [Rust toolchain](https://www.rust-lang.org/learn/get-started) +- [Node.js (18 and later)](https://nodejs.org/en/) and [npm](https://www.npmjs.com/get-npm) +- the [Open Policy Agent](https://www.openpolicyagent.org/docs/latest/#running-opa) binary (or alternatively, Docker) + +1. Get the source + ```sh + git clone https://github.com/element-hq/matrix-authentication-service.git + cd matrix-authentication-service + ``` +1. Build the frontend + ```sh + cd frontend + npm ci + npm run build + cd .. + ``` + This will produce a `frontend/dist` directory containing the built frontend assets. + This folder, along with the `frontend/dist/manifest.json` file, can be relocated, as long as the configuration file is updated accordingly. +1. Build the Open Policy Agent policies + ```sh + cd policies + make + cd .. + ``` + OR, if you don't have `opa` installed and want to build through the OPA docker image + ```sh + cd policies + make DOCKER=1 + cd .. + ``` + This will produce a `policies/policy.wasm` file containing the built OPA policies. + This file can be relocated, as long as the configuration file is updated accordingly. +1. Compile the CLI + ```sh + cargo build --release + ``` +1. Grab the built binary + ```sh + cp ./target/release/mas-cli ~/.local/bin # Copy the binary somewhere in $PATH + mas-cli --help # Should display the help message + ``` + +## Next steps + +The service needs some configuration to work. +This includes random, private keys and secrets. +Follow the [configuration guide](./general.md) to configure the service. 
diff --git a/matrix-authentication-service/docs/setup/migration.md b/matrix-authentication-service/docs/setup/migration.md new file mode 100644 index 00000000..6cd8ce98 --- /dev/null +++ b/matrix-authentication-service/docs/setup/migration.md @@ -0,0 +1,206 @@ +# Migrating an existing homeserver + +One of the design goals of MAS has been to allow it to be used to migrate an existing homeserver, specifically without requiring users to re-authenticate and ensuring that all existing clients continue to work. + +Features that support this include: + +- Ability to import existing password hashes from Synapse +- Ability to import existing sessions and devices +- Ability to import existing access tokens +- Ability to import existing upstream IdP subject ID mappings +- Provides a compatibility layer for legacy Matrix authentication + +## Preparing for the migration + +The deployment is non-trivial, so it is important to read through and understand the steps involved and make a plan before starting. + +### Is your setup ready to be migrated? + +#### SAML2 and LDAP Single Sign-On Providers are not supported + +A deployment that requires SAML or LDAP-based authentication should use a service like [Dex](https://github.com/dexidp/dex) to bridge between the SAML provider and the authentication service. +MAS differs from Synapse in that it does **not** have built-in support for SAML or LDAP-based providers. + +#### Custom password providers are not supported + +If your Synapse homeserver currently uses a custom password provider module, please note that MAS does not support these. + +#### SQLite databases are not supported + +It is worth noting that MAS currently only supports PostgreSQL as a database backend. +The migration tool only supports reading from PostgreSQL for the Synapse database as well. 
+ +### Install and configure MAS alongside your existing homeserver + +Follow the instructions in the [installation guide](installation.md) to install MAS alongside your existing homeserver. + +You'll need a blank PostgreSQL database for MAS to use; it does not share the database with the homeserver. + +MAS provides a tool to generate a configuration file based on your existing Synapse configuration. This is useful for kickstarting your new configuration. + +```sh +mas-cli config generate --synapse-config homeserver.yaml --output mas_config.yaml +``` + +When using this tool, be careful to examine the log output for any warnings about unsupported configuration options. + +#### Local passwords + +Synapse uses bcrypt as its password hashing scheme, while MAS defaults to using the newer argon2id. +You will have to configure the version 1 scheme as bcrypt with `unicode_normalization: true` for migrated passwords to work. +It is also recommended that you keep argon2id as version 2 so that once users log in, their hashes will be updated to the newer, recommended scheme. + +Example passwords configuration: +```yml +passwords: + enabled: true + schemes: + - version: 1 + algorithm: bcrypt + unicode_normalization: true + # Optional, must match the `password_config.pepper` in the Synapse config + #secret: secretPepperValue + - version: 2 + algorithm: argon2id +``` + +If you have a pepper configured in your Synapse password configuration, you'll need to match that on version 1 of the equivalent MAS configuration. + +The migration checker will inform you if this has not been configured properly. + +### Map any upstream SSO providers + +If you are using an upstream SSO provider, then you will need to configure the upstream provider in MAS manually. + +MAS does not support SAML or LDAP upstream providers. +If you are using one of these, you will need to use an adapter such as Dex at this time, but we have not yet documented this procedure. 
+ +Each upstream provider that was used by at least one user in Synapse will need to be configured in MAS. + +Set the `synapse_idp_id` attribute on the provider to: + +- `"oidc"` if you used an OIDC provider in Synapse's legacy `oidc_config` configuration section. +- `"oidc-myprovider"` if you used an OIDC provider in Synapse's `oidc_providers` configuration list, with a `provider` of `"myprovider"`. + (This is because Synapse prefixes the provider ID with `oidc-` internally.) + +Without the `synapse_idp_id`s being set, `mas-cli syn2mas` does not understand which providers in Synapse correspond to which provider in MAS. + +For example, if your Synapse configuration looked like this: + +```yaml +oidc_providers: + - idp_id: dex + idp_name: "My Dex server" + issuer: "https://example.com/dex" + client_id: "synapse" + client_secret: "supersecret" + scopes: ["openid", "profile", "email"] + user_mapping_provider: + config: + localpart_template: "{{ user.email.split('@')[0].lower() }}" + email_template: "{{ user.email }}" + display_name_template: "{{ user.name|capitalize }}" +``` + +Then the equivalent configuration in MAS would look like this: + +```yaml +upstream_oauth2: + providers: + - id: 01JSHPZHAXC50QBKH67MH33TNF + synapse_idp_id: oidc-dex + issuer: "https://example.com/dex" + human_name: "My Dex server" + client_id: "synapse" + client_secret: "supersecret" + token_endpoint_auth_method: client_secret_basic + scope: "email openid profile" + claims_imports: + localpart: + action: require + template: "{{ user.email.split('@')[0].lower() }}" + displayname: + action: force + template: "{{ user.name|capitalize }}" + email: + action: force + template: "{{ user.email }}" +``` + +The migration checker will inform you if a provider is missing from MAS' config. + +### Run the migration checker + +You can use the `check` command of the `syn2mas` tool to identify configuration problems before starting the migration. +You do not need to stop Synapse to run this command. 
+ +```sh +mas-cli syn2mas check --config mas_config.yaml --synapse-config homeserver.yaml +``` + +This may output a list of errors and warnings. + +If you have any errors, you must resolve them before starting the migration. + +If you have any warnings, please read and understand them, and possibly resolve them. +Resolving warnings is not strictly required before starting the migration. + +### Run the migration in test mode (dry-run) + +MAS can perform a dry-run of the import, which is safe to run without stopping Synapse. +It will perform a full data migration but then empty the MAS database at the end to roll back. + +This means it is safe to run multiple times without worrying about resetting the MAS database. +It also means the time this dry-run takes is representative of the time it will take to perform the actual migration. + +```sh +mas-cli syn2mas migrate --config mas_config.yaml --synapse-config homeserver.yaml --dry-run +``` + +## Doing the migration + +Having completed the preparation, you can now proceed with the actual migration. Note that this will require downtime for the homeserver and is not easily reversible. + +### Backup your data and configuration + +As with any migration, it is important to back up your data before proceeding. + +We also suggest making a backup copy of your homeserver's known good configuration before making any changes to enable MAS integration. + +### Shut down the homeserver + +This ensures that no new sessions are created while the migration is in progress. + +### Configure the homeserver to enable MAS integration + +Follow the instructions in the [homeserver configuration guide](homeserver.md) to configure the homeserver to use MAS. + +### Do the import + +Once the homeserver has been stopped, MAS has been configured (but is not running!), and you have a successful migration check, run `syn2mas`'s `migrate` command. 
+
+```sh
+mas-cli syn2mas migrate --config mas_config.yaml --synapse-config homeserver.yaml
+```
+
+#### What to do if it goes wrong
+
+If the migration fails with an error:
+
+- You can try to fix the error and make another attempt by re-running the command; or
+- You can revert your homeserver configuration (so MAS integration is disabled once more) and abort the migration for now. In this case, you should not start MAS up.
+
+In *some cases*, MAS may have written to its own database during a failed migration, causing it to complain in subsequent runs.
+In this case, you can safely delete and recreate the MAS database, then start over.
+
+In *any case*, the migration tool itself **will not** write to the Synapse database, so as long as MAS hasn't been started, it is safe to roll back the migration without restoring the Synapse database.
+
+Please report migration failures to the developers.
+
+### Start up the homeserver
+
+Start up the homeserver again with the new configuration.
+
+### Start up MAS
+
+Now you can start MAS.
diff --git a/matrix-authentication-service/docs/setup/reverse-proxy.md b/matrix-authentication-service/docs/setup/reverse-proxy.md
new file mode 100644
index 00000000..6fb4a393
--- /dev/null
+++ b/matrix-authentication-service/docs/setup/reverse-proxy.md
@@ -0,0 +1,202 @@
+# Configuring a reverse proxy
+
+Although the service can be exposed directly to the internet, including handling the TLS termination, many deployments will want to run a reverse proxy in front of the service.
+
+In those configurations, the service should be configured to listen on `localhost` or a Unix domain socket.
+ +## Example configuration + +```yaml +http: + public_base: https://auth.example.com/ + listeners: + - name: web + resources: + - name: discovery + - name: human + - name: oauth + - name: compat + - name: graphql + - name: assets + + binds: + # Bind on a local port + - host: localhost + port: 8080 + + # OR bind on a Unix domain socket + #- socket: /var/run/mas.sock + + # OR bind on a systemd socket + #- fd: 0 + # kind: tcp # or unix + + # Optional: use the PROXY protocol + #proxy_protocol: true +``` + +## Base nginx configuration + +A basic configuration for `nginx`, which proxies traffic to the service would look like this: + +```nginx +server { + listen 443 ssl http2; + listen [::]:443 ssl http2; + server_name auth.example.com; + + ssl_certificate path/to/fullchain.pem; + ssl_certificate_key path/to/privkey.pem; + + location / { + proxy_http_version 1.1; + proxy_pass http://localhost:8080; + # OR via the Unix domain socket + #proxy_pass http://unix:/var/run/mas.sock; + + # Forward the client IP address + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + # or, using the PROXY protocol + #proxy_protocol on; + } +} +``` + +## Compatibility layer + +For the compatibility layer, the following endpoints need to be proxied to the service: + + - `/_matrix/client/*/login` + - `/_matrix/client/*/logout` + - `/_matrix/client/*/refresh` + +For example, a nginx configuration could look like: + +```nginx +server { + listen 443 ssl http2; + listen [::]:443 ssl http2; + + server_name matrix.example.com; + + # Forward to the auth service + location ~ ^/_matrix/client/(.*)/(login|logout|refresh) { + proxy_http_version 1.1; + proxy_pass http://localhost:8080; + # OR via the Unix domain socket + #proxy_pass http://unix:/var/run/mas.sock; + + # Forward the client IP address + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + # or, using the PROXY protocol + #proxy_protocol on; + } + + # Forward to Synapse + # as per 
https://element-hq.github.io/synapse/latest/reverse_proxy.html#nginx + location ~ ^(/_matrix|/_synapse/client|/_synapse/mas) { + proxy_pass http://localhost:8008; + proxy_set_header X-Forwarded-For $remote_addr; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Host $host; + + client_max_body_size 50M; + proxy_http_version 1.1; + } +} +``` + +## Preserve the client IP + +For rate-limiting and logging purposes, MAS needs to know the client IP address, which can be lost when using a reverse proxy. +There are two ways to preserve the client IP address + +### `X-Forwarded-For` header + +MAS can infer the client IP address from the `X-Forwarded-For` header. +It will trust the value for this header only if the request comes from a trusted reverse proxy. + +The range of IPs that can be trusted is configured using the `trusted_proxies` configuration option, which has the default private IP ranges. + +```yaml +http: + trusted_proxies: + - 192.168.0.0/16 + - 172.16.0.0/12 + - 10.0.0.0/10 + - 127.0.0.1/8 + - fd00::/8 + - ::1/128 +``` + +With nginx, this can be achieved by setting the `proxy_set_header` directive to `X-Forwarded-For $proxy_add_x_forwarded_for`. + +### Proxy protocol + +MAS supports the [PROXY protocol](https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt) to preserve the client IP address. +To enable it, enable the `proxy_protocol` option on the listener: + +```yaml +http: + listeners: + - name: web + resources: + - name: discovery + - name: human + - name: oauth + - name: compat + - name: graphql + - name: assets + binds: + - address: "[::]:8080" + proxy_protocol: true +``` + +With nginx, this can be achieved by setting the `proxy_protocol` directive to `on` in the `location` block. + +## Serve assets directly + +To avoid unnecessary round-trips, the assets can be served directly by nginx, and the `assets` resource can be removed from the service configuration. 
+ +```yaml +http: + listeners: + - name: web + resources: + - name: discovery + - name: human + - name: oauth + - name: compat + - name: graphql + # MAS doesn't need to serve the assets anymore + #- name: assets + binds: + - address: "[::]:8080" + proxy_protocol: true +``` + +Make sure the assets directory served by nginx is up to date. + +```nginx +server { + # --- SNIP --- + + location / { + # --- SNIP --- + } + + # Make nginx serve the assets directly + location /assets/ { + root /path/to/share/assets/; + + # Serve pre-compressed assets + gzip_static on; + # With the ngx_brotli module installed + # https://github.com/google/ngx_brotli + #brotli_static on; + + # Cache assets for a year + expires 365d; + } +} +``` diff --git a/matrix-authentication-service/docs/setup/running.md b/matrix-authentication-service/docs/setup/running.md new file mode 100644 index 00000000..a107ecbc --- /dev/null +++ b/matrix-authentication-service/docs/setup/running.md @@ -0,0 +1,63 @@ +# Running the service + +To fully function, the service needs to run two main components: + + - An HTTP server + - A background worker + +By default, the [`mas-cli server`](../reference/cli/server.md) command will start both components. +It is possible to only run the HTTP server by setting the `--no-worker` option, and run a background worker with the [`mas-cli worker`](../reference/cli/worker.md) command. + +Both components are stateless, and can be scaled horizontally by running multiple instances of each. 
+
+## Runtime requirements
+
+Other than the binary, the service needs a few files to run:
+
+ - The templates, referenced by the [`templates.path`](../reference/configuration.md#templates) configuration option
+ - The compiled policy, referenced by the [`policy.path`](../reference/configuration.md#policy) configuration option
+ - The frontend assets, referenced by the `path` option of the `assets` resource in the [`http.listeners`](../reference/configuration.md#http) configuration section
+ - The frontend manifest file, referenced by the [`templates.assets_manifest`](../reference/configuration.md#templates) configuration option
+
+Be sure to check the [installation instructions](./installation.md) for more information on how to get these files, and make sure the configuration file is updated accordingly.
+
+**If you are using [the docker image](./installation.md#using-the-docker-image)**, everything is already included in the image at the right place, so in most cases you don't need to do anything.
+
+**If you are using [the pre-built binaries](./installation.md#pre-built-binaries)**, those files are shipped alongside them in the `share` directory.
+The default configuration will look for them from the current working directory, meaning that you don't have to adjust the paths, as long as you are running the service from the parent directory of the `share` directory.
+
+## Configure the HTTP server
+
+The service can be configured to have multiple HTTP listeners, serving different resources.
+See the [`http.listeners`](../reference/configuration.md#http) configuration section for more information.
+
+The service needs to be aware of the public URL it is served on, regardless of the HTTP listeners configuration.
+This is done using the [`http.public_base`](../reference/configuration.md#http) configuration option.
+By default, the OIDC issuer advertised by the `/.well-known/openid-configuration` endpoint will be the same as the `public_base` URL, but can be configured to be different.
+
+## Tweak the remaining configuration
+
+A few configuration sections might still require some tweaking, including:
+
+ - [`telemetry`](../reference/configuration.md#telemetry): to set up metrics, tracing and Sentry crash reporting
+ - [`email`](../reference/configuration.md#email): to set up email sending
+ - [`passwords`](../reference/configuration.md#passwords): to enable/disable password authentication
+ - [`account`](../reference/configuration.md#account): to configure what account management features are enabled
+ - [`upstream_oauth2`](../reference/configuration.md#upstream_oauth2): to configure upstream OAuth providers
+
+
+## Run the service
+
+Once the configuration is done, the service can be started with the [`mas-cli server`](../reference/cli/server.md) command:
+
+```sh
+mas-cli server
+```
+
+It is advised to run the service as a non-root user, using a tool like [`systemd`](https://www.freedesktop.org/wiki/Software/systemd/) to manage the service lifecycle.
+
+
+## Troubleshoot common issues
+
+Once the service is running, it is possible to check its configuration using the [`mas-cli doctor`](../reference/cli/doctor.md) command.
+This should help diagnose common issues with the service configuration and deployment.
diff --git a/matrix-authentication-service/docs/setup/sso.md b/matrix-authentication-service/docs/setup/sso.md
new file mode 100644
index 00000000..15d0212d
--- /dev/null
+++ b/matrix-authentication-service/docs/setup/sso.md
@@ -0,0 +1,670 @@
+# Configure an upstream SSO provider
+
+The authentication service supports using an upstream OpenID Connect provider to authenticate its users.
+Multiple providers can be configured, and can be used in conjunction with the local password database authentication.
+
+Any OIDC compliant provider should work with the service as long as it supports the authorization code flow.
+
+**Note that the service does not support other SSO protocols such as SAML**, and there is no plan to support them in the future.
+A deployment which requires SAML or LDAP-based authentication should use a service like [Dex](https://github.com/dexidp/dex) to bridge between the SAML provider and the authentication service.
+
+## General configuration
+
+Configuration of upstream providers is done in the `upstream_oauth2` section of the configuration file, which has a `providers` list.
+Additions and changes to this section are synced with the database on startup.
+Removals need to be applied using the [`mas-cli config sync --prune`](../reference/cli/config.md#config-sync---prune---dry-run) command.
+
+**An exhaustive list of all the parameters is available in the [configuration file reference](../reference/configuration.md#upstream_oauth2).**
+
+The general configuration usually goes as follows:
+
+ - determine a unique `id` for the provider, which will be used as stable identifier between the configuration file and the database. This `id` must be a ULID, and can be generated using online tools like <https://www.ulidtools.com>
+ - create an OAuth 2.0/OIDC client on the provider's side, using the following parameters:
+   - `redirect_uri`: `https://<your-mas-domain>/upstream/callback/<id>`
+   - `response_type`: `code`
+   - `response_mode`: `query`
+   - `grant_type`: `authorization_code`
+   - (optional) `backchannel_logout_uri`: `https://<your-mas-domain>/upstream/backchannel-logout/<id>`
+ - fill the `upstream_oauth2` section of the configuration file with the following parameters:
+   - `providers`:
+     - `id`: the previously generated ULID
+     - `client_id`: the client ID of the OAuth 2.0/OIDC client given by the provider
+     - `client_secret`: the client secret of the OAuth 2.0/OIDC client given by the provider
+     - `issuer`: the issuer URL of the provider
+     - `scope`: the scope to request from the provider. 
`openid` is usually required, and `profile` and `email` are recommended to import a few user attributes. + - setup user attributes mapping to automatically fill the user profile with data from the provider. See the [user attributes mapping](#user-attributes-mapping) section for more details. + +## User attributes mapping + +The authentication service supports importing the following user attributes from the provider: + + - The localpart/username (e.g. `@localpart:example.com`) + - The display name + - An email address + - An account name, to help end users identify what account they are using + +For each of those attributes, administrators can configure a mapping using the claims provided by the upstream provider. +They can also configure what should be done for each of those attributes. It can either: + + - `ignore`: ignore the attribute, and let the user fill it manually + - `suggest`: suggest the attribute to the user, but let them opt-out of importing it + - `force`: automatically import the attribute, but don't fail if it is not provided by the provider + - `require`: automatically import the attribute, and fail if it is not provided by the provider + +A Jinja2 template is used as mapping for each attribute. 
+The following default templates are used: + + - `localpart`: `{{ user.preferred_username }}` + - `displayname`: `{{ user.name }}` + - `email`: `{{ user.email }}` + - `account_name`: none + +The template has the following variables available: + + - `id_token_claims`: an object with the claims got through the `id_token` given by the provider, if provided by the provider + - `userinfo_claims`: an object with the claims got through the `userinfo` endpoint, if `fetch_userinfo` is enabled + - `user`: an object which contains the claims from both the `id_token` and the `userinfo` endpoint + - `extra_callback_parameters`: an object with the additional parameters the provider sent to the redirect URL + + +## Allow linking existing user accounts + +The authentication service supports linking external provider identities to existing local user accounts if the `localpart` matches. + +If the `localpart` given by the upstream provider matches an existing user and the `claims_imports.localpart.action` is set to `force` or `require`, by default the service will refuse to link to that existing account. +This behaviour is controlled by the `claims_imports.localpart.on_conflict` option, which can be set to: + + * `fail` *(default)*: fails the upstream OAuth 2.0 login + * `add`: automatically adds the upstream account to the existing user, regardless of whether the existing user already has another upstream account or not + * `set`: automatically adds the upstream account to the existing user only if there are no other upstream accounts for that provider linked to the user + * `replace`: automatically replaces any upstream account for that provider linked to the user + +```yaml +upstream_oauth2: + providers: + - id: … + claims_imports: + localpart: + action: force + on_conflict: set +``` + +> ⚠️ **Security Notice** +> Enabling this option can introduce a risk of account takeover. 
+
+>
+> To mitigate this risk, ensure that this option is only enabled for identity providers where you can guarantee that the attribute mapping `localpart` will reliably and uniquely correspond to the intended local user account.
+
+
+## Multiple providers behaviour
+
+Multiple authentication methods can be configured at the same time, in which case the authentication service will let the user choose which one to use.
+This is true if both the local password database and an upstream provider are configured, or if multiple upstream providers are configured.
+In such cases, the `human_name` parameter of the provider configuration is used to display a human-readable name for the provider, and the `brand_name` parameter is used to show a logo for well-known providers.
+
+If there is only one upstream provider configured and the local password database is disabled ([`passwords.enabled`](../reference/configuration.md#passwords) is set to `false`), the authentication service will automatically trigger an authorization flow with this provider.
+
+## Backchannel logout
+
+The service supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) requests.
+Those are notifications from the upstream provider that the user has logged out of the provider.
+
+The backchannel logout URI must be configured in the provider as `https://<your-mas-domain>/upstream/backchannel-logout/<id>`, where `<id>` is the `id` of the provider.
+
+By default, the authentication service will not perform any action when receiving a backchannel logout request.
+The [`on_backchannel_logout`](../reference/configuration.md#upstream_oauth2) option can be used to configure what to do when receiving a backchannel logout request.
+
+Possible values are:
+
+ - `do_nothing`: Do nothing, other than validating and logging the request
+ - `logout_browser_only`: Only log out the MAS 'browser session' started by this OIDC session
+ - `logout_all`: Log out all sessions started by this OIDC session, including MAS 'browser sessions' and client sessions
+
+One important caveat is that `logout_all` will log out all sessions started by this upstream OIDC session, including 'remote' ones done through the Device Code flow.
+Concretely, this means that if QR-code login is used to log in on a phone from a laptop, when MAS receives a backchannel logout request from the upstream provider for the laptop, MAS will also log out the session on the phone.
+
+## Sample configurations
+
+This section contains sample configurations for popular OIDC providers.
+
+### Apple
+
+Sign-in with Apple uses a non-standard method for authenticating clients, which requires a special configuration.
+
+```yaml
+upstream_oauth2:
+  providers:
+    - id: 01JAYS74TCG3BTWKADN5Q4518C
+      issuer: "https://appleid.apple.com"
+      human_name: "Apple"
+      brand_name: "apple"
+      client_id: "" # TO BE FILLED
+      scope: "openid name email"
+      response_mode: "form_post"
+      token_endpoint_auth_method: "sign_in_with_apple"
+      sign_in_with_apple:
+        # Only one of the below should be filled for the private key
+        private_key_file: "" # TO BE FILLED
+        private_key: | # TO BE FILLED
+          #
+
+        team_id: "" # TO BE FILLED
+        key_id: "" # TO BE FILLED
+      claims_imports:
+        localpart:
+          action: ignore
+        displayname:
+          action: suggest
+          # SiWA passes down the user infos as query parameters in the callback
+          # which is available in the extra_callback_parameters variable
+          template: |
+            {%- set u = extra_callback_parameters["user"] | from_json -%}
+            {{- u.name.firstName }} {{ u.name.lastName -}}
+        email:
+          action: suggest
+        account_name:
+          template: |
+            {%- set u = extra_callback_parameters["user"] | from_json -%}
+            {{- u.name.firstName }} {{ u.name.lastName -}}
+```
+
+### 
Authelia + +These instructions assume that you have already enabled the OIDC provider support in [Authelia](https://www.authelia.com/). + +Add a client for MAS to Authelia's `configuration.yaml` (see the [Authelia OIDC documentation](https://www.authelia.com/configuration/identity-providers/openid-connect/clients/) for full details): + +```yaml +identity_providers: + oidc: + clients: + - client_id: "" # TO BE FILLED + client_name: Matrix + client_secret: "" # TO BE FILLED + public: false + redirect_uris: + - https:///upstream/callback/ + scopes: + - openid + - groups + - profile + - email + grant_types: + - 'refresh_token' + - 'authorization_code' + response_types: + - code +``` + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: + human_name: Authelia + issuer: "https://" # TO BE FILLED W/O ANY TRAILING SLASHES + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + token_endpoint_auth_method: client_secret_basic + scope: "openid profile email" + discovery_mode: insecure + claims_imports: + localpart: + action: require + template: "{{ user.preferred_username }}" + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" +``` + + +### Authentik + +[Authentik](https://goauthentik.io/) is an open-source IdP solution. + +1. Create a provider in Authentik, with type OAuth2/OpenID. +2. The parameters are: + - Client Type: Confidential + - Redirect URIs: `https:///upstream/callback/` +3. Create an application for the authentication service in Authentik and link it to the provider. +4. Note the slug of your application, Client ID and Client Secret. 
+ +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: 01HFRQFT5QFMJFGF01P7JAV2ME + human_name: Authentik + issuer: "https:///application/o//" # TO BE FILLED + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + token_endpoint_auth_method: client_secret_basic + scope: "openid profile email" + claims_imports: + localpart: + action: require + template: "{{ user.preferred_username }}" + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" +``` + + +### Facebook + +0. You will need a Facebook developer account. You can register for one [here](https://developers.facebook.com/async/registration/). +1. On the [apps](https://developers.facebook.com/apps/) page of the developer console, "Create App", and choose "Allow people to log in with their Facebook account". +2. Once the app is created, add "Facebook Login" and choose "Web". You don't + need to go through the whole form here. +3. In the left-hand menu, open "Use cases" > "Authentication and account creation" > "Customize" > "Settings" + * Add `https:///upstream/callback/` as an OAuth Redirect URL. +4. In the left-hand menu, open "App settings/Basic". Here you can copy the "App ID" and "App Secret" for use below. 
+ +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: "01HFS3WM7KSWCEQVJTN0V9X1W6" + issuer: "https://www.facebook.com" + human_name: "Facebook" + brand_name: "facebook" + discovery_mode: disabled + pkce_method: always + authorization_endpoint: "https://facebook.com/v11.0/dialog/oauth/" + token_endpoint: "https://graph.facebook.com/v11.0/oauth/access_token" + jwks_uri: "https://www.facebook.com/.well-known/oauth/openid/jwks/" + token_endpoint_auth_method: "client_secret_post" + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + scope: "openid" + claims_imports: + localpart: + action: ignore + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" + account_name: + template: "{{ user.name }}" +``` + + +### GitLab + +1. Create a [new application](https://gitlab.com/profile/applications). +2. Add the `openid` scope. Optionally add the `profile` and `email` scope if you want to import the user's name and email. +3. Add this Callback URL: `https:///upstream/callback/` + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: "01HFS67GJ145HCM9ZASYS9DC3J" + issuer: "https://gitlab.com" + human_name: "GitLab" + brand_name: "gitlab" + token_endpoint_auth_method: "client_secret_post" + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + scope: "openid profile email" + claims_imports: + displayname: + action: suggest + template: "{{ user.name }}" + localpart: + action: ignore + email: + action: suggest + template: "{{ user.email }}" + account_name: + template: "@{{ user.preferred_username }}" +``` + +### GitHub + +GitHub doesn't support OpenID Connect, but it does support OAuth 2.0. +It will use the `fetch_userinfo` option with a manual `userinfo_endpoint` to fetch the user's profile through the GitHub API. + +1. Create a [new application](https://github.com/settings/applications/new). +2. 
Fill in the form with an application name and homepage URL.
+3. Use the following Authorization callback URL: `https://<your-mas-domain>/upstream/callback/<id>`
+4. Retrieve the Client ID
+5. Generate a Client Secret and copy it
+
+Authentication service configuration:
+
+```yaml
+upstream_oauth2:
+  providers:
+    - id: "01HFS67GJ145HCM9ZASYS9DC3J"
+      human_name: GitHub
+      brand_name: github
+      discovery_mode: disabled
+      fetch_userinfo: true
+      token_endpoint_auth_method: "client_secret_post"
+      client_id: "" # TO BE FILLED
+      client_secret: "" # TO BE FILLED
+      authorization_endpoint: "https://github.com/login/oauth/authorize"
+      token_endpoint: "https://github.com/login/oauth/access_token"
+      userinfo_endpoint: "https://api.github.com/user"
+      scope: "read:user"
+      claims_imports:
+        subject:
+          template: "{{ userinfo_claims.id }}"
+        displayname:
+          action: suggest
+          template: "{{ userinfo_claims.name }}"
+        localpart:
+          action: ignore
+        email:
+          action: suggest
+          template: "{{ userinfo_claims.email }}"
+        account_name:
+          template: "@{{ userinfo_claims.login }}"
+```
+
+
+### Google
+
+1. Set up a project in the Google API Console (see [documentation](https://developers.google.com/identity/protocols/oauth2/openid-connect#appsetup))
+2. Add an "OAuth Client ID" for a Web Application under ["Credentials"](https://console.developers.google.com/apis/credentials)
+3. 
Add the following "Authorized redirect URI": `https:///upstream/callback/` + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: 01HFS6S2SVAR7Y7QYMZJ53ZAGZ + human_name: Google + brand_name: "google" + issuer: "https://accounts.google.com" + token_endpoint_auth_method: "client_secret_post" + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + scope: "openid profile email" + claims_imports: + localpart: + action: ignore + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" + account_name: + template: "{{ user.email }}" +``` + + +### Keycloak + + +Follow the [Getting Started Guide](https://www.keycloak.org/guides) to install Keycloak and set up a realm. + +1. Click `Clients` in the sidebar and click `Create` +2. Fill in the fields as below: + + | Field | Value | + |-----------|-----------| + | Client ID | `matrix-authentication-service` | + | Client Protocol | `openid-connect` | + +3. Click `Save` +4. Fill in the fields as below: + + | Field | Value | + |-----------|-----------| + | Client ID | `matrix-authentication-service` | + | Enabled | `On` | + | Client Protocol | `openid-connect` | + | Access Type | `confidential` | + | Valid Redirect URIs | `https:///upstream/callback/` | + | Front channel logout | `Off` | + | Backchannel logout URL | `https:///upstream/backchannel-logout/` | + | Backchannel logout session required | `On` | + +5. Click `Save` +6. On the Credentials tab, update the fields: + + | Field | Value | + |-------|-------| + | Client Authenticator | `Client ID and Secret` | + +7. Click `Regenerate Secret` +8. 
Copy Secret + +```yaml +upstream_oauth2: + providers: + - id: "01H8PKNWKKRPCBW4YGH1RWV279" + issuer: "https:///realms/" # TO BE FILLED + token_endpoint_auth_method: client_secret_basic + client_id: "matrix-authentication-service" + client_secret: "" # TO BE FILLED + scope: "openid profile email" + claims_imports: + localpart: + action: require + template: "{{ user.preferred_username }}" + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" +``` + + +### Microsoft Azure Active Directory + +Azure AD can act as an OpenID Connect Provider. +Register a new application under *App registrations* in the Azure AD management console. +The `RedirectURI` for your application should point to your authentication service instance: +`https:///upstream/callback/` where `` is the same as in the config file. + +Go to *Certificates & secrets* and register a new client secret. +Make note of your Directory (tenant) ID as it will be used in the Azure links. + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: "01HFRPWGR6BG9SAGAKDTQHG2R2" + human_name: Microsoft Azure AD + issuer: "https://login.microsoftonline.com//v2.0" # TO BE FILLED + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + scope: "openid profile email" + discovery_mode: insecure + + claims_imports: + localpart: + action: require + template: "{{ (user.preferred_username | split('@'))[0] }}" + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" + account_name: + template: "{{ user.preferred_username }}" +``` + +### Discord + +1. Create a new application in the Discord Developer Portal (see [documentation](https://discord.com/developers/applications)) +2. 
Add the following "Redirect URI" in the OAuth2 tab under settings: `https:///upstream/callback/` + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: 01JQK7DK6VFH62NMW4HS9RKD3R + human_name: Discord + brand_name: "discord" + token_endpoint_auth_method: "client_secret_post" + issuer: "https://discord.com" + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + fetch_userinfo: true + userinfo_endpoint: "https://discord.com/api/users/@me" + scope: "openid identify email" + claims_imports: + localpart: + action: suggest + template: "{{ user.username }}" + displayname: + action: suggest + template: "{{ user.global_name }}" + email: + action: suggest + template: "{{ user.email }}" + account_name: + template: "{{ user.username }}" +``` + + +### Rauthy + +1. Click `Clients` in the Rauthy Admin sidebar and click `Add new Client` +2. Fill in the fields as below: + + | Field | Value | + |-----------|-----------| + | Client ID | `matrix-authentication-service` | + | Client Name | `matrix-authentication-service` | + | Redirect URI | `https:///upstream/callback/` | + +3. Set the client to be `Confidential`. + +4. Click `Save` + +5. Select the client you just created from the clients list. +6. Enable the `authorization_code`, and `refresh_token` grant types. +7. Set the allowed scopes to `openid`, `profile`, and `email`. +8. Set both Access Algorithm and ID Algorithm to `RS256`. +9. Set PKCE challenge method to `S256`. +10. Click `Save` +11. Copy the `Client ID` from the `Config` tab and the `Client Secret` from the `Secret` tab. 
+ + +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: "01JFFHK7HJF70YSYF753GEWVRP" + human_name: Rauthy + issuer: "https:///auth/v1" # TO BE FILLED + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + scope: "openid profile email" + claims_imports: + localpart: + action: ignore + displayname: + action: suggest + template: "{{ user.given_name }}" + email: + action: suggest + template: "{{ user.email }}" +``` + +To use a Rauthy-supported [Ephemeral Client](https://sebadob.github.io/rauthy/work/ephemeral_clients.html#ephemeral-clients), use this JSON document: + +```json +{ + "client_id": "https://path.to.this.json", + "redirect_uris": [ + "https://your-app.com/callback" + ], + "grant_types": [ + "authorization_code", + "refresh_token" + ], + "access_token_signed_response_alg": "RS256", + "id_token_signed_response_alg": "RS256" +} +``` + + +### Shibboleth + +[Shibboleth](https://www.shibboleth.net/) is an open-source identity management system commonly used by universities and research institutions. +It is primarily based on SAML but also supports OIDC via the [OIDC OP Plugin](https://shibboleth.atlassian.net/wiki/spaces/IDPPLUGINS/pages/1376878976/OIDC+OP). + +These instructions assume you have a running Shibboleth instance with the OIDC plugin configured. + +Register MAS as a relying party in Shibboleth: + +1. Add a metadata file (e.g. `mas-metadata.xml`) to `%{idp.home}/metadata/` with the following content: + + ```xml + + + + + + + + + + + authorization_code + code + + + + + + ``` + + Replace ``, ``, ``, and `` with your values. + +2. Reference the metadata file in `%{idp.home}/conf/metadata-providers.xml` and reload services. 
+ +Authentication service configuration: + +```yaml +upstream_oauth2: + providers: + - id: 01JB6YS8N7Q2ZM9CPXW6V0KGRT + human_name: Shibboleth + issuer: "https:///" # TO BE FILLED + client_id: "" # TO BE FILLED + client_secret: "" # TO BE FILLED + token_endpoint_auth_method: client_secret_basic + scope: "openid profile email" + discovery_mode: insecure + fetch_userinfo: true + claims_imports: + localpart: + action: require + template: "{{ user.preferred_username }}" + displayname: + action: suggest + template: "{{ user.name }}" + email: + action: suggest + template: "{{ user.email }}" +``` diff --git a/matrix-authentication-service/docs/storybook/README.md b/matrix-authentication-service/docs/storybook/README.md new file mode 100644 index 00000000..2021b392 --- /dev/null +++ b/matrix-authentication-service/docs/storybook/README.md @@ -0,0 +1,2 @@ +This is a placeholder which is replaced by the built Storybook when building the documentation. +If you're seeing this, you're probably looking at the documentation source, and should look at the built documentation instead here: \ No newline at end of file diff --git a/matrix-authentication-service/docs/topics/access-token.md b/matrix-authentication-service/docs/topics/access-token.md new file mode 100644 index 00000000..16acc4a1 --- /dev/null +++ b/matrix-authentication-service/docs/topics/access-token.md @@ -0,0 +1,31 @@ +# Get an access token + +The [Matrix Authentication Service repository contains a simple shell script](https://github.com/element-hq/matrix-authentication-service/blob/main/misc/device-code-grant.sh) to interactively get an access token with arbitrary scopes. +It requires `sh`, `jq` and `curl` to be installed. +This can be run from anywhere, not necessarily from the host where MAS is running. + +```sh +sh ./misc/device-code-grant.sh [synapse-url] ... +``` + +This will prompt you to open a URL in your browser, finish the authentication flow, and print the access and refresh tokens. 
+ +This can be used to get access to the MAS admin API: + +```sh +sh ./misc/device-code-grant.sh https://synapse.example.com/ urn:mas:admin +``` + +Or to the Synapse admin API: + +```sh +sh ./misc/device-code-grant.sh https://synapse.example.com/ urn:matrix:org.matrix.msc2967.client:api:* urn:synapse:admin:* +``` + +Or even both at the same time: + +```sh +sh ./misc/device-code-grant.sh https://synapse.example.com/ urn:matrix:org.matrix.msc2967.client:api:* urn:mas:admin urn:synapse:admin:* +``` + +Note that the token will only be valid for a short time (5 minutes by default) and needs to be revoked manually from the MAS user interface. diff --git a/matrix-authentication-service/docs/topics/admin-api.md b/matrix-authentication-service/docs/topics/admin-api.md new file mode 100644 index 00000000..8aa992c7 --- /dev/null +++ b/matrix-authentication-service/docs/topics/admin-api.md @@ -0,0 +1,272 @@ +# Admin API + +MAS provides a REST-like API for administrators to manage the service. +This API is intended to build tools on top of MAS, and is only available to administrators. + +> **Note:** This Admin API is now the correct way for external tools to interact with MAS. External access to the [Internal GraphQL API](../development/graphql.md) is deprecated and will be removed in a future release. + +## Enabling the API + +The API isn't exposed by default, and must be added to either a public or a private HTTP listener. +It is considered safe to expose the API to the public, as access to it is gated by the `urn:mas:admin` scope. 
+ +To enable the API, tweak the [`http.listeners`](../reference/configuration.md#httplisteners) configuration section to add the `adminapi` resource: + +```yaml +http: + listeners: + - name: web + resources: + # Other public resources + - name: discovery + # … + - name: adminapi + binds: + - address: "[::]:8080" + # or to a separate, internal listener: + - name: internal + resources: + # Other internal resources + - name: health + - name: prometheus + # … + - name: adminapi + binds: + - host: localhost + port: 8081 +``` + +## Reference documentation + +The API is documented using the [OpenAPI specification](https://spec.openapis.org/oas/v3.1.0). +The API schema is available [here](../api/spec.json). +This schema can be viewed in tools like Swagger UI, available [here](../api/). + +If admin API is enabled, MAS will also serve the specification at `/api/spec.json`, with a Swagger UI available at `/api/doc/`. + +## Authentication + +All requests to the admin API are gated either using access tokens obtained using OAuth 2.0 grants, +or using personal access tokens (which must currently be issued through the Admin API). + +They must have the [`urn:mas:admin`](../reference/scopes.md#urnmasadmin) scope. + +### User-interactive tools + +If the intent is to build admin tools where the administrator logs in themselves, interactive grants like the [authorization code] grant or the [device authorization] grant should be used. + +In this case, whether the user can request admin access or not is defined by the `can_request_admin` attribute of the user. 
+ +To try it out in Swagger UI, a client can be defined statically in the configuration file like this: + +```yaml +clients: + - client_id: 01J44Q10GR4AMTFZEEF936DTCM + # For the authorization_code grant, Swagger UI uses the client_secret_post authentication method + client_auth_method: client_secret_post + client_secret: wie9oh2EekeeDeithei9Eipaeh2sohte + redirect_uris: + # The Swagger UI callback in the hosted documentation + - https://element-hq.github.io/matrix-authentication-service/api/oauth2-redirect.html + # The Swagger UI callback hosted by the service + - https://mas.example.com/api/doc/oauth2-callback +``` + +Then, in Swagger UI, click on the "Authorize" button. +In the modal, enter the client ID and client secret **in the `authorizationCode` section**, select the `urn:mas:admin` scope and click on the "Authorize" button. + +### Automated tools + +If the intent is to build tools that are not meant to be used by humans, the client credentials grant should be used. + +In this case, the client must be listed in the [`policy.data.admin_clients`](../reference/configuration.md#policy) configuration option. + +```yaml +policy: + data: + admin_clients: + - 01J44QC8BCY7FCFM7WGHQGKMTJ +``` + +To try it out in Swagger UI, a client can be defined statically in the configuration file like this: + +```yaml +clients: + - client_id: 01J44QC8BCY7FCFM7WGHQGKMTJ + # For the client_credentials grant, Swagger UI uses the client_secret_basic authentication method + client_auth_method: client_secret_basic + client_secret: eequie6Oth4Ip2InahT5zuQu8OuPohLi +``` + +Then, in Swagger UI, click on the "Authorize" button. +In the modal, enter the client ID and client secret **in the `clientCredentials` section**, select the `urn:mas:admin` scope and click on the "Authorize" button. + + +## General API shape + +The API takes inspiration from the [JSON API](https://jsonapi.org/) specification for its request and response shapes. 
+ +### Single resource + +When querying a single resource, the response is generally shaped like this: + +```json +{ + "data": { + "type": "type-of-the-resource", + "id": "unique-id-for-the-resource", + "attributes": { + "some-attribute": "some-value" + }, + "links": { + "self": "/api/admin/v1/type-of-the-resource/unique-id-for-the-resource" + } + }, + "links": { + "self": "/api/admin/v1/type-of-the-resource/unique-id-for-the-resource" + } +} +``` + +### List of resources + +When querying a list of resources, the response is generally shaped like this: + +```json +{ + "meta": { + "count": 42 + }, + "data": [ + { + "type": "type-of-the-resource", + "id": "unique-id-for-the-resource", + "attributes": { + "some-attribute": "some-value" + }, + "links": { + "self": "/api/admin/v1/type-of-the-resource/unique-id-for-the-resource" + } + }, + { "...": "..." }, + { "...": "..." } + ], + "links": { + "self": "/api/admin/v1/type-of-the-resource?page[first]=10&page[after]=some-id", + "first": "/api/admin/v1/type-of-the-resource?page[first]=10", + "last": "/api/admin/v1/type-of-the-resource?page[last]=10", + "next": "/api/admin/v1/type-of-the-resource?page[first]=10&page[after]=some-id", + "prev": "/api/admin/v1/type-of-the-resource?page[last]=10&page[before]=some-id" + } +} +``` + +The `meta` will have the total number of items in it, and the `links` object contains the links to the next and previous pages, if any. + +Pagination is cursor-based, where the ID of items is used as the cursor. +Resources can be paginated forwards using the `page[after]` and `page[first]` parameters, and backwards using the `page[before]` and `page[last]` parameters. + +### Error responses + +Error responses will use a 4xx or 5xx status code, with the following shape: + +```json +{ + "errors": [ + { + "title": "Error title" + } + ] +} +``` + +Well-known error codes are not yet specified. 
+ +## Example + +With the following configuration: + +```yaml +clients: + - client_id: 01J44RKQYM4G3TNVANTMTDYTX6 + client_auth_method: client_secret_basic + client_secret: phoo8ahneir3ohY2eigh4xuu6Oodaewi + +policy: + data: + admin_clients: + - 01J44RKQYM4G3TNVANTMTDYTX6 +``` + +`curl` example to list the users that are not locked and have the `can_request_admin` flag set to `true`: + +```bash +CLIENT_ID=01J44RKQYM4G3TNVANTMTDYTX6 +CLIENT_SECRET=phoo8ahneir3ohY2eigh4xuu6Oodaewi + +# Get an access token +curl \ + -u "$CLIENT_ID:$CLIENT_SECRET" \ + -d "grant_type=client_credentials&scope=urn:mas:admin" \ + https://mas.example.com/oauth2/token \ + | jq -r '.access_token' \ + | read -r ACCESS_TOKEN + +# List users (The -g flag prevents curl from interpreting the brackets in the URL) +curl \ + -g \ + -H "Authorization: Bearer $ACCESS_TOKEN" \ + 'https://mas.example.com/api/admin/v1/users?filter[can_request_admin]=true&filter[status]=active&page[first]=100' \ + | jq +``` + +
+ +Sample output + + +```json +{ + "meta": { + "count": 2 + }, + "data": [ + { + "type": "user", + "id": "01J2KDPHTZYW3TAT1SKVAD63SQ", + "attributes": { + "username": "kilgore-trout", + "created_at": "2024-07-12T12:11:46.911578Z", + "locked_at": null, + "can_request_admin": true + }, + "links": { + "self": "/api/admin/v1/users/01J2KDPHTZYW3TAT1SKVAD63SQ" + } + }, + { + "type": "user", + "id": "01J3G5W8MRMBJ93ZYEGX2BN6NK", + "attributes": { + "username": "quentin", + "created_at": "2024-07-23T16:13:04.024378Z", + "locked_at": null, + "can_request_admin": true + }, + "links": { + "self": "/api/admin/v1/users/01J3G5W8MRMBJ93ZYEGX2BN6NK" + } + } + ], + "links": { + "self": "/api/admin/v1/users?filter[can_request_admin]=true&filter[status]=active&page[first]=100", + "first": "/api/admin/v1/users?filter[can_request_admin]=true&filter[status]=active&page[first]=100", + "last": "/api/admin/v1/users?filter[can_request_admin]=true&filter[status]=active&page[last]=100" + } +} +``` + +
+ +[authorization code]: ../topics/authorization.md#authorization-code-grant +[device authorization]: ../topics/authorization.md#device-authorization-grant diff --git a/matrix-authentication-service/docs/topics/authorization.md b/matrix-authentication-service/docs/topics/authorization.md new file mode 100644 index 00000000..1bdeb820 --- /dev/null +++ b/matrix-authentication-service/docs/topics/authorization.md @@ -0,0 +1,170 @@ +# Authorization and sessions + +The main job of the authentication service is to grant access to resources to clients, and to let resources know who is accessing them. +In less abstract terms, this means that the service is responsible for issuing access tokens and letting the homeserver (and other services) introspect those access tokens. + +## How access tokens work + +In MAS, the access token is an opaque string for which the service has metadata associated with it. +An access token has: + +- a subject, which is the user the token is issued for +- a list of [scopes](../reference/scopes.md) +- a client for which the token is issued +- a timeframe for which the token is valid + +On a single token, metadata is immutable: it doesn't change over time. +One exception is the validity of the token: the service may revoke a token before its expiration date. + +A typical client will get a short-lived access token (valid 5 minutes) along with a refresh token. +The refresh token can then be used to get a new access token without the user having to re-authenticate. + +## How Synapse behaves + +When an incoming request is made to Synapse, it will introspect the access token through the Matrix Authentication Service. +This is using a standard OAuth 2.0 introspection request ([RFC 7662]). + +Out of this request, Synapse will care about the following: + +- the `active` field, which tells if the token is valid or not +- the `sub` field, which tells which user the token is issued for. 
This is an opaque string, and Synapse saves the mapping between the Matrix user ID and the subject of the token in its own database +- in case Synapse doesn't know the presented subject, it will look at the `username` field, which it will use as the localpart for the user as a fallback +- the `scope` field, which tells which scopes are granted to the token. More specifically, it will look for the following scopes: + - [`urn:matrix:org.matrix.msc2967.client:api:*`], which grants broad access to the whole Matrix C-S API + - [`urn:matrix:org.matrix.msc2967.client:device:AABBCC`], which encodes the Matrix device ID used by the client + - [`urn:synapse:admin:*`], which grants access to the Synapse admin API + +It's important to understand that when Synapse delegates authentication to MAS, Synapse no longer manages many user attributes. +This includes the user admin, locked, and deactivated status. + +## Compatibility sessions + +In addition to OAuth 2.0 sessions, for which we'll go into more details later, MAS also supports the legacy [`/_matrix/client/v3/login`](https://spec.matrix.org/v1.10/client-server-api/#get_matrixclientv3login) API. +This exists as a compatibility layer for clients that don't yet support OAuth 2.0, but has some restrictions compared to the way those sessions behaved in Synapse. + +When a client presents a compatibility access token to Synapse, MAS will make it look to Synapse as if the token had the following scopes: + +- [`urn:matrix:org.matrix.msc2967.client:api:*`] +- [`urn:matrix:org.matrix.msc2967.client:device:AABBCC`] + +This corresponds to the broad access to the Matrix C-S API and the device ID of the client, as one would expect from the legacy login API. +One important missing scope is [`urn:synapse:admin:*`], which means that the client won't have access to the Synapse admin API. 
 + +This is the case even if the user has the `can_request_admin` attribute set to `true`, and this is by design: +the legacy login API doesn't have a way to request specific scopes, and we don't want to grant admin access to all clients that have a compatibility session. +This was the case in the past with Synapse, as the admin status was set on the user itself, but this is not the case anymore with MAS. + +## OAuth 2.0 sessions + +Modern clients are expected to use OAuth 2.0 to authenticate with the homeserver. +In OAuth 2.0/OIDC, there are multiple ways, called grants, to start an OAuth 2.0 session. + +An OAuth 2.0 session has three important properties: + +- the client, which is the application accessing the resource +- the user, which is the user for which the client is accessing the resource +- a set of scopes, which are the permissions granted to the client + +There are two main ways to create a client in MAS: + +- through the OAuth 2.0 Dynamic Client Registration Protocol ([RFC 7591]) +- statically defined [in the configuration file](../reference/configuration.md#clients) + +### Authorized as a user or authorized as a client + +OAuth 2.0 has an interesting concept where a session can be authorized not just as a user, but also as a client. +This means an OAuth 2.0 session can be created without a user, and only with a client. +It is useful for automated machine-to-machine communication, and is often referred to as "service accounts". + +Synapse doesn't yet support this concept, and as such requesting any Synapse API, even the admin API, requires a user attached to the session. + +This isn't the case with MAS' GraphQL API, which can be accessed with a client-only session: +the API can be requested by a session which has the [`urn:mas:graphql:*`] and the [`urn:mas:admin`] scope without being backed by a user. + +### Supported authorization grants + +MAS supports a few different authorization grants for OAuth 2.0 sessions. 
+Whilst this section won't go into the technical details of how those grants work, it's important to understand what they are and what they are used for. + +| Grant type | Entity | User interaction | Matrix C-S API | Synapse Admin API | MAS Admin API | MAS Internal GraphQL API | +| --------------------------------------------------- | ------ | ---------------- | -------------- | ----------------- | ------------- | ------------------------ | +| [Authorization code](#authorization-code-grant) | User | Same device | Yes | Yes | Yes | Yes | +| [Device authorization](#device-authorization-grant) | User | Other device | Yes | Yes | Yes | Yes | +| [Client credentials](#client-credentials-grant) | Client | None | No | No[^admin] | Yes | Yes | + +[^admin]: The Synapse admin API doesn't strictly require a user, but Synapse doesn't support client-only sessions yet. In the future, it will be possible to leverage the client credentials grant to access the Synapse admin API. + +#### Authorization code grant + +The authorization code grant ([RFC 6749] section 4.1) is used to interactively log in the user on the same device as the client. +This is the most common grant for most Matrix clients and is targeted at human end users. + +The general idea is that the client (after registering itself) crafts an authorization URL that the user will visit in their web browser. +The authentication service does whatever it needs to do to authenticate the user, and once the user is authenticated and consented to the access request, the service redirects the user back to the client with an authorization code. +The client then exchanges this authorization code for an access token and a refresh token. + +This grant is not meant for automation: it requires user interaction on the same device as where the client lives. 
+ +#### Device authorization grant + +The device authorization grant ([RFC 8628]) is similar to the authorization code grant, but separates the user interaction from where the client lives. + +A classic example of this grant is when a client is on a TV or a game console, where the user wouldn't want to enter their credentials on the device itself. +Instead, the user is shown a code on the device, which they then enter on a different device (like a phone or a computer) to authenticate. + +For Matrix, it has two main use cases: + +- for CLI tools (or other constrained clients) which can't open a web browser or can't catch a redirect +- for a "login from another existing device" feature, like the "login via QR code" described in [MSC4108] + +This grant isn't meant for automation either, as it still requires user interaction. + +#### Client credentials grant + +The client credentials grant ([RFC 6749] section 4.4) is a bit special, as it lets a client authenticate as itself, without a user. + +This has no meaning yet in the Matrix C-S API, but is useful for other APIs like the MAS GraphQL API. +It may also be used in the future as a foundation for a new Application Service API, replacing the current `hs_token`/`as_token` mechanism. + +This works by presenting the client credentials to get back an access token. +The simplest type of client credentials is a client ID and client secret pair, but MAS also supports client authentication with a JWT ([RFC 7523]), which is a robust way to authenticate clients without a shared secret. + +## Personal sessions (personal access tokens) + +Personal access tokens are a credential that can be issued to give access to a user, +with predefined scopes and a predefined expiry time. +Either before or after expiry, the owner of the token can regenerate it, which produces a new +access token with the same scopes but a new expiry time. + +Personal access tokens are intended to fulfill two basic use cases: + +1. 
an easy way to obtain a clean token for your own user, for use in automation and scripts; +2. a way to obtain a token for administrative access of another user, either for ad-hoc administrative operations or to set up a bot or similar service. + +In the future, users will be able to create their own personal access tokens, but this is currently not implemented +so (1) is currently not supported. + +For now, personal access tokens must be created, regenerated and revoked by administrators through the [Admin API], satisfying use case (2). +[Element Admin](https://github.com/element-hq/element-admin), available by default in Element Server Suite, can be used to do this interactively. +You can also use the online beta deployment at [admin-beta.element.dev](https://admin-beta.element.dev/). + +### Validity + +Personal sessions can be used so long as: + +- the owner (creator) of the token is still an active and unlocked user (or static OAuth 2 client); and +- the actor (target user, or user being controlled by the token) has not been deactivated. Though the actor is allowed to be locked. 
 + + +[MSC4108]: https://github.com/matrix-org/matrix-spec-proposals/pull/4108 +[RFC 6749]: https://datatracker.ietf.org/doc/html/rfc6749 +[RFC 7523]: https://datatracker.ietf.org/doc/html/rfc7523 +[RFC 7591]: https://datatracker.ietf.org/doc/html/rfc7591 +[RFC 7662]: https://datatracker.ietf.org/doc/html/rfc7662 +[RFC 8628]: https://datatracker.ietf.org/doc/html/rfc8628 +[`urn:matrix:org.matrix.msc2967.client:api:*`]: ../reference/scopes.md#urnmatrixorgmatrixmsc2967clientapi +[`urn:matrix:org.matrix.msc2967.client:device:AABBCC`]: ../reference/scopes.md#urnmatrixorgmatrixmsc2967clientdevicedevice-id +[`urn:synapse:admin:*`]: ../reference/scopes.md#urnsynapseadmin +[`urn:mas:graphql:*`]: ../reference/scopes.md#urnmasgraphql +[`urn:mas:admin`]: ../reference/scopes.md#urnmasadmin +[Admin API]: ./admin-api.md diff --git a/matrix-authentication-service/docs/topics/policy.md b/matrix-authentication-service/docs/topics/policy.md new file mode 100644 index 00000000..7ea1c2c9 --- /dev/null +++ b/matrix-authentication-service/docs/topics/policy.md @@ -0,0 +1,74 @@ +# Policy engine + +A set of actions is controlled by a generic policy engine. +A decision of the policy engine is deterministically made based on three components: + + - The policy itself + - A static configuration + - The action to be performed + +The policy is an [Open Policy Agent (OPA)](https://www.openpolicyagent.org/) policy compiled into WebAssembly. +Matrix Authentication Service ships with a default policy which should be sufficient for most deployments. +It can be replaced with a custom policy if needed, which can be useful to implement custom authorization logic without recompiling the service. + +## Actions + +The policy engine mainly restricts three operations: + + - **User attributes**, which includes user registration, user profile updates, and user password changes. + - **Client registration**, when an OAuth 2.0 dynamic client registration is requested. 
+ - **Authorization requests**, when a client requests an access token. + +Policies are only evaluated in user-facing contexts, and not in administrative contexts. +As such, they usually can be bypassed through the admin API or the CLI if needed. + +### User attributes + +The policy is evaluated in the following different scenarios: + + - [`register.rego`]: During user registration, either with password credentials or with an upstream OAuth 2.0 provider. This calls the [`email.rego`] policy as well. + - [`email.rego`]: When a user adds a new email address to their account. + +### Client registration + +The policy ([`client_registration.rego`]) is evaluated when a client sends their metadata through the OAuth 2.0 dynamic client registration API. +By default, it enforces a set of strict rules to make sure clients provide enough information about themselves, with coherent URLs. +This is useful in production environments, but can be relaxed in development environments. + +### Authorization requests + +The policy ([`authorization_grant.rego`]) is evaluated when a client requests an access token. +This only covers OAuth 2.0 sessions, not compatibility sessions. +It is evaluated for the authorization code grant, the client credentials grant and the device authorization grant. + +This is probably the most interesting policy, as it defines which scope can be granted to which user and which client. + +On evaluation, three main entities are available: + + - details about **the grant**, such as the type of grant and the requested scopes + - **the client** making the request + - **the user** with their attributes (only for the authorization code grant and the device authorization grant) + +The policy evaluation cannot *modify* the grant, only allow or deny it. +Therefore the client must know in advance which scope they want to request. 
 + +This is an important concept to understand: what access a token has is stored in the session itself, therefore access to privileged scopes is only based on policy evaluation, not on user attributes. + +If we take the Synapse admin API access as an example, the fact that an access token has admin API access doesn't depend on attributes on the user *directly*. +Instead, it is during the creation of the session that: + + - the client asks for the corresponding scope (e.g. `urn:synapse:admin:*`) + - the policy engine decides whether to grant it or not + +The default policy shipped with the service does gate access to this scope based on a user attribute (`can_request_admin`), but this is not a requirement. + +It does make reasoning about admin access more complicated compared to a simple boolean flag on the user like what Synapse does, but it also allows for more complex authorization logic. +This is especially important as in the future it will make it possible to implement a more granular role-based access control system to fit more complex use cases. + +To understand the authorization process and how sessions are created, refer to the [authorization and sessions](./authorization.md) section. 
+ + +[`register.rego`]: https://github.com/element-hq/matrix-authentication-service/blob/main/policies/register/register.rego +[`email.rego`]: https://github.com/element-hq/matrix-authentication-service/blob/main/policies/email/email.rego +[`client_registration.rego`]: https://github.com/element-hq/matrix-authentication-service/blob/main/policies/client_registration/client_registration.rego +[`authorization_grant.rego`]: https://github.com/element-hq/matrix-authentication-service/blob/main/policies/authorization_grant/authorization_grant.rego diff --git a/matrix-authentication-service/frontend/.browserlistrc b/matrix-authentication-service/frontend/.browserlistrc new file mode 100644 index 00000000..65d06365 --- /dev/null +++ b/matrix-authentication-service/frontend/.browserlistrc @@ -0,0 +1,7 @@ +last 2 Chrome versions, +last 2 Firefox versions, +Firefox ESR, +last 2 Opera versions, +last 2 Safari versions, +last 2 edge version, +not dead diff --git a/matrix-authentication-service/frontend/.gitignore b/matrix-authentication-service/frontend/.gitignore new file mode 100644 index 00000000..bef0bf15 --- /dev/null +++ b/matrix-authentication-service/frontend/.gitignore @@ -0,0 +1,8 @@ +# Copyright 2025 New Vector Ltd. +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +# Please see LICENSE files in the repository root for full details. 
+ +/node_modules +/dist +/coverage diff --git a/matrix-authentication-service/frontend/.npmrc b/matrix-authentication-service/frontend/.npmrc new file mode 100644 index 00000000..b6f27f13 --- /dev/null +++ b/matrix-authentication-service/frontend/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/matrix-authentication-service/frontend/.postcssrc.json b/matrix-authentication-service/frontend/.postcssrc.json new file mode 100644 index 00000000..fc2f40df --- /dev/null +++ b/matrix-authentication-service/frontend/.postcssrc.json @@ -0,0 +1,8 @@ +{ + "plugins": { + "postcss-import": {}, + "tailwindcss/nesting": "postcss-nesting", + "tailwindcss": {}, + "autoprefixer": {} + } +} diff --git a/matrix-authentication-service/frontend/.storybook/locales.ts b/matrix-authentication-service/frontend/.storybook/locales.ts new file mode 100644 index 00000000..9a545003 --- /dev/null +++ b/matrix-authentication-service/frontend/.storybook/locales.ts @@ -0,0 +1,228 @@ +export type LocalazyLanguage = { + language: string; + region: string; + script: string; + isRtl: boolean; + localizedName: string; + name: string; + aliasOf?: string | null | undefined; + expansionOf?: string | null | undefined; + pluralType: (n: number) => "zero" | "one" | "two" | "many" | "few" | "other"; +}; +export type LocalazyFile = { + cdnHash: string; + file: string; + path: string; + library?: string | null | undefined; + module?: string | null | undefined; + buildType?: string | null | undefined; + productFlavors?: string[] | null | undefined; + cdnFiles: { [lang:string]: string }; +}; +export type LocalazyMetadata = { + projectUrl: string; + baseLocale: string; + languages: LocalazyLanguage[]; + files: LocalazyFile[]; +}; + +const localazyMetadata: LocalazyMetadata = { + projectUrl: "https://localazy.com/p/matrix-authentication-service", + baseLocale: "en", + languages: [ + { + language: "cs", + region: "", + script: "", + isRtl: false, + name: "Czech", + localizedName: "Čeština", + pluralType: (n) => { 
return (n===1) ? "one" : (n>=2 && n<=4) ? "few" : "other"; } + }, + { + language: "da", + region: "", + script: "", + isRtl: false, + name: "Danish", + localizedName: "Dansk", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "de", + region: "", + script: "", + isRtl: false, + name: "German", + localizedName: "Deutsch", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "en", + region: "", + script: "", + isRtl: false, + name: "English", + localizedName: "English", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "et", + region: "", + script: "", + isRtl: false, + name: "Estonian", + localizedName: "Eesti", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "fi", + region: "", + script: "", + isRtl: false, + name: "Finnish", + localizedName: "Suomi", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "fr", + region: "", + script: "", + isRtl: false, + name: "French", + localizedName: "Français", + pluralType: (n) => { return (n===0 || n===1) ? "one" : "other"; } + }, + { + language: "hu", + region: "", + script: "", + isRtl: false, + name: "Hungarian", + localizedName: "Magyar", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "nb", + region: "NO", + script: "", + isRtl: false, + name: "Norwegian Bokmål (Norway)", + localizedName: "Norsk bokmål (Norge)", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "nl", + region: "", + script: "", + isRtl: false, + name: "Dutch", + localizedName: "Nederlands", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "pl", + region: "", + script: "", + isRtl: false, + name: "Polish", + localizedName: "Polski", + pluralType: (n) => { return (n===1) ? "one" : ((n%10>=2 && n%10<=4) && ((n%100<12 || n%100>14))) ? 
"few" : "many"; } + }, + { + language: "pt", + region: "", + script: "", + isRtl: false, + name: "Portuguese", + localizedName: "Português", + pluralType: (n) => { return (n>=0 && n<=1) ? "one" : "other"; } + }, + { + language: "ru", + region: "", + script: "", + isRtl: false, + name: "Russian", + localizedName: "Русский", + pluralType: (n) => { return ((n%10===1) && (n%100!==11)) ? "one" : ((n%10>=2 && n%10<=4) && ((n%100<12 || n%100>14))) ? "few" : "many"; } + }, + { + language: "sv", + region: "", + script: "", + isRtl: false, + name: "Swedish", + localizedName: "Svenska", + pluralType: (n) => { return (n===1) ? "one" : "other"; } + }, + { + language: "uk", + region: "", + script: "", + isRtl: false, + name: "Ukrainian", + localizedName: "Українська", + pluralType: (n) => { return ((n%10===1) && (n%100!==11)) ? "one" : ((n%10>=2 && n%10<=4) && ((n%100<12 || n%100>14))) ? "few" : "many"; } + }, + { + language: "zh", + region: "", + script: "Hans", + isRtl: false, + name: "Simplified Chinese", + localizedName: "简体中文", + pluralType: (n) => { return "other"; } + } + ], + files: [ + { + cdnHash: "7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2", + file: "frontend.json", + path: "", + cdnFiles: { + "cs": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/cs/frontend.json", + "da": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/da/frontend.json", + "de": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/de/frontend.json", + "en": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/en/frontend.json", + "et": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/et/frontend.json", + "fi": 
"https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/fi/frontend.json", + "fr": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/fr/frontend.json", + "hu": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/hu/frontend.json", + "nb_NO": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/nb-NO/frontend.json", + "nl": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/nl/frontend.json", + "pl": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/pl/frontend.json", + "pt": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/pt/frontend.json", + "ru": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/ru/frontend.json", + "sv": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/sv/frontend.json", + "uk": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/uk/frontend.json", + "zh#Hans": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/7c203a8ac8bd48c3c4609a8effcd0fbac430f9b2/zh-Hans/frontend.json" + } + }, + { + cdnHash: "5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e", + file: "file.json", + path: "", + cdnFiles: { + "cs": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/cs/file.json", + "da": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/da/file.json", + "de": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/de/file.json", + "en": 
"https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/en/file.json", + "et": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/et/file.json", + "fi": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/fi/file.json", + "fr": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/fr/file.json", + "hu": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/hu/file.json", + "nb_NO": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/nb-NO/file.json", + "nl": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/nl/file.json", + "pl": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/pl/file.json", + "pt": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/pt/file.json", + "ru": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/ru/file.json", + "sv": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/sv/file.json", + "uk": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/uk/file.json", + "zh#Hans": "https://delivery.localazy.com/_a7686032324574572744739e0707/_e0/5b69b0350dccfd47c245a5d41c1b9fdf6912cc6e/zh-Hans/file.json" + } + } + ] +}; + +export default localazyMetadata; \ No newline at end of file diff --git a/matrix-authentication-service/frontend/.storybook/main.ts b/matrix-authentication-service/frontend/.storybook/main.ts new file mode 100644 index 00000000..b4ffa197 --- /dev/null +++ 
b/matrix-authentication-service/frontend/.storybook/main.ts @@ -0,0 +1,35 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import type { StorybookConfig } from "@storybook/react-vite"; + +const config: StorybookConfig = { + stories: ["../{src,stories}/**/*.stories.@(js|jsx|ts|tsx)"], + + addons: ["@storybook/addon-docs"], + + framework: "@storybook/react-vite", + + typescript: { + reactDocgen: "react-docgen-typescript", + }, + + core: { + disableTelemetry: true, + }, + + env: { + STORYBOOK: "true", + }, + + viteFinal: async (config) => { + // Serve the storybook-specific assets, which has the service worker + config.publicDir = ".storybook/public"; + return config; + }, +}; + +export default config; diff --git a/matrix-authentication-service/frontend/.storybook/preview-head.html b/matrix-authentication-service/frontend/.storybook/preview-head.html new file mode 100644 index 00000000..66d2626e --- /dev/null +++ b/matrix-authentication-service/frontend/.storybook/preview-head.html @@ -0,0 +1,9 @@ + + + diff --git a/matrix-authentication-service/frontend/.storybook/preview.tsx b/matrix-authentication-service/frontend/.storybook/preview.tsx new file mode 100644 index 00000000..3841d8d5 --- /dev/null +++ b/matrix-authentication-service/frontend/.storybook/preview.tsx @@ -0,0 +1,152 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import type { Decorator, Preview } from "@storybook/react-vite"; +import { TooltipProvider } from "@vector-im/compound-web"; +import { initialize, mswLoader } from "msw-storybook-addon"; +import { useEffect, useLayoutEffect } from "react"; +import { I18nextProvider } from "react-i18next"; +import "../src/entrypoints/shared.css"; +import i18n, { setupI18n } from "../src/i18n"; +import { DummyRouter } from "../src/test-utils/router"; +import { handlers } from "../tests/mocks/handlers"; +import localazyMetadata from "./locales"; + +initialize( + { + onUnhandledRequest: "bypass", + serviceWorker: { + url: "./mockServiceWorker.js", + }, + }, + handlers, +); + +setupI18n(); + +const allThemesClasses = [ + "cpd-theme-light", + "cpd-theme-light-hc", + "cpd-theme-dark", + "cpd-theme-dark-hc", +]; + +const ThemeSwitcher: React.FC<{ + theme: string; +}> = ({ theme }) => { + useLayoutEffect(() => { + document.documentElement.classList.remove(...allThemesClasses); + if (theme !== "system") { + document.documentElement.classList.add(`cpd-theme-${theme}`); + } + return () => document.documentElement.classList.remove(...allThemesClasses); + }, [theme]); + + return null; +}; + +const withThemeProvider: Decorator = (Story, context) => { + return ( + <> + + + + ); +}; + +const LocaleSwitcher: React.FC<{ + locale: string; +}> = ({ locale }) => { + useEffect(() => { + i18n.changeLanguage(locale); + }, [locale]); + + return null; +}; + +const withI18nProvider: Decorator = (Story, context) => { + return ( + <> + + + + + + ); +}; + +const withDummyRouter: Decorator = (Story, _context) => { + return ( + + + + ); +}; + +const withTooltipProvider: Decorator = (Story, _context) => { + return ( + + + + ); +}; + +const preview: Preview = { + loaders: [mswLoader], + parameters: { + controls: { + matchers: { + color: /(background|color)$/i, + date: /Date$/, + }, + }, + }, + decorators: [ + withI18nProvider, + withThemeProvider, + withDummyRouter, + withTooltipProvider, + ], + globalTypes: { + 
theme: { + name: "Theme", + description: "Global theme for components", + toolbar: { + icon: "circlehollow", + title: "Theme", + items: [ + { title: "System", value: "system", icon: "browser" }, + { title: "Light", value: "light", icon: "sun" }, + { title: "Light (high contrast)", value: "light-hc", icon: "sun" }, + { title: "Dark", value: "dark", icon: "moon" }, + { title: "Dark (high contrast)", value: "dark-hc", icon: "moon" }, + ], + }, + }, + + locale: { + name: "Locale", + description: "Locale for the app", + toolbar: { + title: "Language", + icon: "globe", + items: localazyMetadata.languages.map( + ({ language, localizedName, name }) => ({ + title: `${localizedName} (${name})`, + value: language, + }), + ), + }, + }, + }, + initialGlobals: { + locale: localazyMetadata.baseLocale, + theme: "system", + }, + tags: ["autodocs"], +}; + +export default preview; diff --git a/matrix-authentication-service/frontend/.storybook/public/mockServiceWorker.js b/matrix-authentication-service/frontend/.storybook/public/mockServiceWorker.js new file mode 100644 index 00000000..258b1b1e --- /dev/null +++ b/matrix-authentication-service/frontend/.storybook/public/mockServiceWorker.js @@ -0,0 +1,349 @@ +/* eslint-disable */ +/* tslint:disable */ + +/** + * Mock Service Worker. + * @see https://github.com/mswjs/msw + * - Please do NOT modify this file. 
+ */ + +const PACKAGE_VERSION = '2.12.8' +const INTEGRITY_CHECKSUM = '4db4a41e972cec1b64cc569c66952d82' +const IS_MOCKED_RESPONSE = Symbol('isMockedResponse') +const activeClientIds = new Set() + +addEventListener('install', function () { + self.skipWaiting() +}) + +addEventListener('activate', function (event) { + event.waitUntil(self.clients.claim()) +}) + +addEventListener('message', async function (event) { + const clientId = Reflect.get(event.source || {}, 'id') + + if (!clientId || !self.clients) { + return + } + + const client = await self.clients.get(clientId) + + if (!client) { + return + } + + const allClients = await self.clients.matchAll({ + type: 'window', + }) + + switch (event.data) { + case 'KEEPALIVE_REQUEST': { + sendToClient(client, { + type: 'KEEPALIVE_RESPONSE', + }) + break + } + + case 'INTEGRITY_CHECK_REQUEST': { + sendToClient(client, { + type: 'INTEGRITY_CHECK_RESPONSE', + payload: { + packageVersion: PACKAGE_VERSION, + checksum: INTEGRITY_CHECKSUM, + }, + }) + break + } + + case 'MOCK_ACTIVATE': { + activeClientIds.add(clientId) + + sendToClient(client, { + type: 'MOCKING_ENABLED', + payload: { + client: { + id: client.id, + frameType: client.frameType, + }, + }, + }) + break + } + + case 'CLIENT_CLOSED': { + activeClientIds.delete(clientId) + + const remainingClients = allClients.filter((client) => { + return client.id !== clientId + }) + + // Unregister itself when there are no more clients + if (remainingClients.length === 0) { + self.registration.unregister() + } + + break + } + } +}) + +addEventListener('fetch', function (event) { + const requestInterceptedAt = Date.now() + + // Bypass navigation requests. + if (event.request.mode === 'navigate') { + return + } + + // Opening the DevTools triggers the "only-if-cached" request + // that cannot be handled by the worker. Bypass such requests. 
+ if ( + event.request.cache === 'only-if-cached' && + event.request.mode !== 'same-origin' + ) { + return + } + + // Bypass all requests when there are no active clients. + // Prevents the self-unregistered worked from handling requests + // after it's been terminated (still remains active until the next reload). + if (activeClientIds.size === 0) { + return + } + + const requestId = crypto.randomUUID() + event.respondWith(handleRequest(event, requestId, requestInterceptedAt)) +}) + +/** + * @param {FetchEvent} event + * @param {string} requestId + * @param {number} requestInterceptedAt + */ +async function handleRequest(event, requestId, requestInterceptedAt) { + const client = await resolveMainClient(event) + const requestCloneForEvents = event.request.clone() + const response = await getResponse( + event, + client, + requestId, + requestInterceptedAt, + ) + + // Send back the response clone for the "response:*" life-cycle events. + // Ensure MSW is active and ready to handle the message, otherwise + // this message will pend indefinitely. + if (client && activeClientIds.has(client.id)) { + const serializedRequest = await serializeRequest(requestCloneForEvents) + + // Clone the response so both the client and the library could consume it. + const responseClone = response.clone() + + sendToClient( + client, + { + type: 'RESPONSE', + payload: { + isMockedResponse: IS_MOCKED_RESPONSE in response, + request: { + id: requestId, + ...serializedRequest, + }, + response: { + type: responseClone.type, + status: responseClone.status, + statusText: responseClone.statusText, + headers: Object.fromEntries(responseClone.headers.entries()), + body: responseClone.body, + }, + }, + }, + responseClone.body ? [serializedRequest.body, responseClone.body] : [], + ) + } + + return response +} + +/** + * Resolve the main client for the given event. + * Client that issues a request doesn't necessarily equal the client + * that registered the worker. 
It's with the latter the worker should + * communicate with during the response resolving phase. + * @param {FetchEvent} event + * @returns {Promise} + */ +async function resolveMainClient(event) { + const client = await self.clients.get(event.clientId) + + if (activeClientIds.has(event.clientId)) { + return client + } + + if (client?.frameType === 'top-level') { + return client + } + + const allClients = await self.clients.matchAll({ + type: 'window', + }) + + return allClients + .filter((client) => { + // Get only those clients that are currently visible. + return client.visibilityState === 'visible' + }) + .find((client) => { + // Find the client ID that's recorded in the + // set of clients that have registered the worker. + return activeClientIds.has(client.id) + }) +} + +/** + * @param {FetchEvent} event + * @param {Client | undefined} client + * @param {string} requestId + * @param {number} requestInterceptedAt + * @returns {Promise} + */ +async function getResponse(event, client, requestId, requestInterceptedAt) { + // Clone the request because it might've been already used + // (i.e. its body has been read and sent to the client). + const requestClone = event.request.clone() + + function passthrough() { + // Cast the request headers to a new Headers instance + // so the headers can be manipulated with. + const headers = new Headers(requestClone.headers) + + // Remove the "accept" header value that marked this request as passthrough. + // This prevents request alteration and also keeps it compliant with the + // user-defined CORS policies. 
+ const acceptHeader = headers.get('accept') + if (acceptHeader) { + const values = acceptHeader.split(',').map((value) => value.trim()) + const filteredValues = values.filter( + (value) => value !== 'msw/passthrough', + ) + + if (filteredValues.length > 0) { + headers.set('accept', filteredValues.join(', ')) + } else { + headers.delete('accept') + } + } + + return fetch(requestClone, { headers }) + } + + // Bypass mocking when the client is not active. + if (!client) { + return passthrough() + } + + // Bypass initial page load requests (i.e. static assets). + // The absence of the immediate/parent client in the map of the active clients + // means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet + // and is not ready to handle requests. + if (!activeClientIds.has(client.id)) { + return passthrough() + } + + // Notify the client that a request has been intercepted. + const serializedRequest = await serializeRequest(event.request) + const clientMessage = await sendToClient( + client, + { + type: 'REQUEST', + payload: { + id: requestId, + interceptedAt: requestInterceptedAt, + ...serializedRequest, + }, + }, + [serializedRequest.body], + ) + + switch (clientMessage.type) { + case 'MOCK_RESPONSE': { + return respondWithMock(clientMessage.data) + } + + case 'PASSTHROUGH': { + return passthrough() + } + } + + return passthrough() +} + +/** + * @param {Client} client + * @param {any} message + * @param {Array} transferrables + * @returns {Promise} + */ +function sendToClient(client, message, transferrables = []) { + return new Promise((resolve, reject) => { + const channel = new MessageChannel() + + channel.port1.onmessage = (event) => { + if (event.data && event.data.error) { + return reject(event.data.error) + } + + resolve(event.data) + } + + client.postMessage(message, [ + channel.port2, + ...transferrables.filter(Boolean), + ]) + }) +} + +/** + * @param {Response} response + * @returns {Response} + */ +function respondWithMock(response) { + // Setting 
response status code to 0 is a no-op. + // However, when responding with a "Response.error()", the produced Response + // instance will have status code set to 0. Since it's not possible to create + // a Response instance with status code 0, handle that use-case separately. + if (response.status === 0) { + return Response.error() + } + + const mockedResponse = new Response(response.body, response) + + Reflect.defineProperty(mockedResponse, IS_MOCKED_RESPONSE, { + value: true, + enumerable: true, + }) + + return mockedResponse +} + +/** + * @param {Request} request + */ +async function serializeRequest(request) { + return { + url: request.url, + mode: request.mode, + method: request.method, + headers: Object.fromEntries(request.headers.entries()), + cache: request.cache, + credentials: request.credentials, + destination: request.destination, + integrity: request.integrity, + redirect: request.redirect, + referrer: request.referrer, + referrerPolicy: request.referrerPolicy, + body: await request.arrayBuffer(), + keepalive: request.keepalive, + } +} diff --git a/matrix-authentication-service/frontend/codegen.ts b/matrix-authentication-service/frontend/codegen.ts new file mode 100644 index 00000000..2020638e --- /dev/null +++ b/matrix-authentication-service/frontend/codegen.ts @@ -0,0 +1,33 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import type { CodegenConfig } from "@graphql-codegen/cli"; + +const config: CodegenConfig = { + schema: "./schema.graphql", + documents: ["src/**/*.{tsx,ts}", "!src/gql/**/*"], + ignoreNoDocuments: true, // for better experience with the watcher + generates: { + "./src/gql/": { + preset: "client", + plugins: ["typescript-msw"], + config: { + documentMode: "string", + useTypeImports: true, + enumsAsTypes: true, + // By default, unknown scalars are generated as `any`. This is not ideal for catching potential bugs. + defaultScalarType: "unknown", + maybeValue: "T | null | undefined", + scalars: { + DateTime: "string", + Url: "string", + }, + }, + }, + }, +}; + +export default config; diff --git a/matrix-authentication-service/frontend/graphql.config.json b/matrix-authentication-service/frontend/graphql.config.json new file mode 100644 index 00000000..1e3f8b17 --- /dev/null +++ b/matrix-authentication-service/frontend/graphql.config.json @@ -0,0 +1,4 @@ +{ + "schema": "./schema.graphql", + "documents": "./src/**/*" +} diff --git a/matrix-authentication-service/frontend/i18next.config.ts b/matrix-authentication-service/frontend/i18next.config.ts new file mode 100644 index 00000000..04453eec --- /dev/null +++ b/matrix-authentication-service/frontend/i18next.config.ts @@ -0,0 +1,18 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import { defineConfig } from "i18next-cli"; + +export default defineConfig({ + locales: ["en"], + extract: { + input: "src/**/*.{ts,tsx}", + output: "locales/{{language}}.json", + defaultNS: false, + pluralSeparator: ":", + keySeparator: ".", + sort: true, + }, +}); diff --git a/matrix-authentication-service/frontend/index.html b/matrix-authentication-service/frontend/index.html new file mode 100644 index 00000000..cdbc8a69 --- /dev/null +++ b/matrix-authentication-service/frontend/index.html @@ -0,0 +1,28 @@ + + + + + + + + + + matrix-authentication-service + + + + +
+ + + diff --git a/matrix-authentication-service/frontend/knip.config.ts b/matrix-authentication-service/frontend/knip.config.ts new file mode 100644 index 00000000..be6e73eb --- /dev/null +++ b/matrix-authentication-service/frontend/knip.config.ts @@ -0,0 +1,20 @@ +// Copyright 2024, 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import type { KnipConfig } from "knip"; + +export default { + entry: ["src/entrypoints/*", "src/routes/*"], + ignore: [ + "src/gql/*", + "src/routeTree.gen.ts", + ".storybook/locales.ts", + "i18next.config.ts", + ], + ignoreDependencies: [ + // This is used by the tailwind PostCSS plugin, but not detected by knip + "postcss-nesting", + ], +} satisfies KnipConfig; diff --git a/matrix-authentication-service/frontend/locales/cs.json b/matrix-authentication-service/frontend/locales/cs.json new file mode 100644 index 00000000..443cf8ce --- /dev/null +++ b/matrix-authentication-service/frontend/locales/cs.json @@ -0,0 +1,404 @@ +{ + "action": { + "back": "Zpět", + "cancel": "Zrušit", + "clear": "Vymazat", + "close": "Zavřít", + "collapse": "Sbalit", + "confirm": "Potvrdit", + "continue": "Pokračovat", + "edit": "Upravit", + "expand": "Rozbalit", + "save": "Uložit", + "save_and_continue": "Uložit a pokračovat", + "sign_out": "Odhlásit", + "start_over": "Začít znovu" + }, + "branding": { + "privacy_policy": { + "alt": "Odkaz na zásady ochrany osobních údajů služby", + "link": "Zásady ochrany osobních údajů" + }, + "terms_and_conditions": { + "alt": "Odkaz na smluvní podmínky služby", + "link": "Všeobecné obchodní podmínky" + } + }, + "common": { + "add": "Přidat", + "e2ee": "Koncové šifrování", + "error": "Chyba", + "loading": "Načítání…", + "next": "Další", + "password": "Heslo", + "previous": "Předchozí", + "saved": "Uloženo", + "saving": "Ukládání..." 
+ }, + "frontend": { + "account": { + "account_password": "Heslo účtu", + "contact_info": "Kontaktní informace", + "delete_account": { + "alert_description": "Tento účet bude trvale smazán a nebudete mít přístup k žádné ze svých zpráv.", + "alert_title": "Přijdete o všechna svá data", + "button": "Smazat účet", + "dialog_description": "Potvrďte, že chcete smazat svůj účet:\n\n\nNebudete moci svůj účet znovu aktivovat\nNebudete se už moci přihlásit\nNikdo nebude moci znovu použít vaše uživatelské jméno (MXID), včetně vás\nOpustíte všechny místnosti a přímé zprávy, ve kterých se nacházíte\nBudete odstraněni ze serveru identity a nikdo vás nebude moci najít pomocí vašeho e-mailu nebo telefonního čísla\n\nVaše staré zprávy budou stále viditelné pro lidi, kteří je obdrželi. Chcete skrýt své odeslané zprávy před lidmi, kteří vstoupí do místnosti v budoucnu?", + "dialog_title": "Smazat tento účet?", + "erase_checkbox_label": "Ano, skrýt všechny mé zprávy před novými členy", + "incorrect_password": "Nesprávné heslo, zkuste to prosím znovu", + "mxid_label": "Potvrďte své Matrix ID ({{ mxid }})", + "mxid_mismatch": "Tato hodnota neodpovídá vašemu Matrix ID", + "password_label": "Zadejte své heslo pro pokračování" + }, + "edit_profile": { + "display_name_help": "Tohle uvidí ostatní, ať už jste přihlášeni kdekoli.", + "display_name_label": "Zobrazované jméno", + "title": "Upravit profil", + "username_label": "Uživatelské jméno" + }, + "password": { + "change": "Změna hesla", + "change_disabled": "Změny hesla jsou zakázány správcem.", + "label": "Heslo" + }, + "sign_out": { + "button": "Odhlásit se z účtu", + "dialog": "Odhlásit se z tohoto účtu?" 
+ }, + "title": "Váš účet" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Zadaný e-mail není povolen zásadami serveru.", + "title": "E-mail zamítnut zásadami" + }, + "email_denied_error": "Zadaný e-mail není zásadami serveru povolen.", + "email_exists_alert": { + "text": "Zadaný e-mail je již přidán do tohoto účtu", + "title": "E-mail již existuje" + }, + "email_exists_error": "Zadaný e-mail je již přidán do tohoto účtu", + "email_field_help": "Přidejte alternativní e-mail, který můžete použít pro přístup k tomuto účtu.", + "email_field_label": "Přidat e-mail", + "email_in_use_error": "Zadaný e-mail se již používá", + "email_invalid_alert": { + "text": "Zadaný email je neplatný", + "title": "Neplatný e-mail" + }, + "email_invalid_error": "Zadaný email je neplatný", + "incorrect_password_error": "Nesprávné heslo, zkuste to prosím znovu", + "password_confirmation": "Potvrďte heslo k účtu při přidání této e-mailové adresy." + }, + "app_sessions_list": { + "error": "Nepodařilo se načíst relace aplikace", + "heading": "Aplikace" + }, + "browser_session_details": { + "current_badge": "Aktuální", + "session_details_title": "Relace" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktivní relace", + "body:few": "{{count}} aktivní relace", + "body:other": "{{count}} aktivních relací", + "heading": "Prohlížeče", + "no_active_sessions": { + "default": "Nejste přihlášeni do žádného webového prohlížeče.", + "inactive_90_days": "Všechny vaše relace byly aktivní v posledních 90 dnech." + }, + "view_all_button": "Zobrazit vše" + }, + "compat_session_detail": { + "client_details_title": "Informace o klientovi", + "name": "Jméno", + "session_details_title": "Relace" + }, + "device_type_icon_label": { + "desktop": "Desktop", + "mobile": "Mobilní", + "pc": "Počítač", + "tablet": "Tablet", + "unknown": "Neznámý typ zařízení", + "web": "Web" + }, + "email_in_use": { + "heading": "E-mailová adresa {{email}} se již používá." 
+ }, + "end_session_button": { + "confirmation_modal_title": "Opravdu chcete ukončit tuto relaci?", + "text": "Odstranit zařízení" + }, + "error": { + "hideDetails": "Skrýt podrobnosti", + "showDetails": "Zobrazit detaily", + "subtitle": "Došlo k neočekávané chybě. Zkuste to prosím znovu.", + "title": "Něco se pokazilo" + }, + "error_boundary_title": "Něco se pokazilo", + "errors": { + "field_required": "Toto pole je povinné", + "rate_limit_exceeded": "V krátké době jste podali příliš mnoho žádostí. Počkejte prosím několik minut a zkuste to znovu." + }, + "last_active": { + "active_date": "Aktivní {{relativeDate}}", + "active_now": "Nyní aktivní", + "inactive_90_days": "Neaktivní více než 90 dní" + }, + "nav": { + "devices": "Zařízení", + "plan": "Plán", + "profile": "Profil", + "sessions": "Relace", + "settings": "Nastavení" + }, + "not_found_alert_title": "Nenalezeno.", + "not_logged_in_alert": "Nejste přihlášeni.", + "oauth2_client_detail": { + "details_title": "Informace o klientovi", + "id": "ID klienta", + "name": "Jméno", + "policy": "Zásady", + "terms": "Podmínky služby" + }, + "oauth2_session_detail": { + "client_details_name": "Jméno", + "client_title": "Informace o klientovi", + "session_details_title": "Relace" + }, + "pagination_controls": { + "total": "Celkem: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Současné heslo", + "failure": { + "description": { + "account_locked": "Váš účet je uzamčen a v tuto chvíli jej nelze obnovit. Pokud to neočekáváte, obraťte se na správce serveru.", + "expired_recovery_ticket": "Platnost odkazu na obnovení vypršela. Spusťte prosím proces obnovy účtu znovu od začátku.", + "invalid_new_password": "Nové heslo, které jste si zvolili, je neplatné. Nemusí splňovat nakonfigurované zásady zabezpečení.", + "no_current_password": "Nemáte aktuální heslo.", + "no_such_recovery_ticket": "Odkaz na obnovení je neplatný. 
Pokud jste odkaz zkopírovali z e-mailu pro obnovení, zkontrolujte, zda byl zkopírován celý odkaz.", + "password_changes_disabled": "Změny hesla jsou zakázány.", + "recovery_ticket_already_used": "Odkaz na obnovení již byl použit. Nelze jej použít znovu.", + "unspecified": "Může se jednat o dočasný problém, zkuste to prosím znovu později. Pokud problém přetrvává, obraťte se na správce serveru.", + "wrong_password": "Heslo, které jste zadali jako aktuální heslo, je nesprávné. Zkuste to prosím znovu." + }, + "title": "Nepodařilo se aktualizovat heslo" + }, + "new_password_again_label": "Znovu zadejte nové heslo", + "new_password_label": "Nové heslo", + "passwords_match": "Hesla se shodují!", + "passwords_no_match": "Hesla se neshodují", + "subtitle": "Zvolte nové heslo pro svůj účet.", + "success": { + "description": "Vaše heslo bylo úspěšně aktualizováno.", + "title": "Heslo aktualizováno" + }, + "title": "Změňte své heslo" + }, + "password_reset": { + "consumed": { + "subtitle": "Chcete-li vytvořit nové heslo, začněte znovu a vyberte „Zapomenuté heslo“.", + "title": "Odkaz na obnovení hesla již byl použit" + }, + "expired": { + "resend_email": "Znovu odeslat e-mail", + "subtitle": "Požádejte o nový e-mail, který bude zaslán na adresu: {{email}}", + "title": "Platnost odkazu na obnovení hesla vypršela" + }, + "subtitle": "Zvolte nové heslo pro svůj účet.", + "title": "Obnovte své heslo" + }, + "password_strength": { + "placeholder": "Síla hesla", + "score": { + "0": "Extrémně slabé heslo", + "1": "Velmi slabé heslo", + "2": "Slabé heslo", + "3": "Silné heslo", + "4": "Velmi silné heslo" + }, + "suggestion": { + "all_uppercase": "Některá písmena pište s velkými písmeny ale ne všechny.", + "another_word": "Přidejte další slova, která jsou méně běžná.", + "associated_years": "Vyhněte se rokům, které jsou s vámi spojeny.", + "capitalization": "Velká písmena pište více než jen první písmeno.", + "dates": "Vyhněte se datům a rokům, které jsou s vámi spojeny.", + "l33t": 
"Vyhněte se předvídatelným nahrazením písmen jako „@“ za „a“.", + "longer_keyboard_pattern": "Používejte delší vzory kláves a několikrát změňte směr psaní.", + "no_need": "Silná hesla můžete vytvářet bez použití symbolů, čísel nebo velkých písmen.", + "pwned": "Pokud toto heslo používáte jinde, měli byste ho změnit.", + "recent_years": "Vyhněte se posledním letům.", + "repeated": "Vyhněte se opakovaným slovům a znakům.", + "reverse_words": "Vyhněte se obrácenému hláskování běžných slov.", + "sequences": "Vyhněte se běžným sekvencím znaků.", + "use_words": "Používejte více slov, ale vyhněte se běžným frázím." + }, + "too_weak": "Toto heslo je příliš slabé", + "warning": { + "common": "Toto je běžně používané heslo.", + "common_names": "Běžná jména a příjmení lze snadno uhodnout.", + "dates": "Data lze snadno uhodnout.", + "extended_repeat": "Opakované vzory znaků jako „abcabcabc“ lze snadno uhodnout.", + "key_pattern": "Krátké vzory klávesnice lze snadno odhadnout.", + "names_by_themselves": "Jednotlivá jména nebo příjmení lze snadno uhodnout.", + "pwned": "Vaše heslo bylo odhaleno při úniku dat na internetu.", + "recent_years": "Poslední roky lze snadno odhadnout.", + "sequences": "Běžné sekvence znaků jako „abc“ lze snadno uhodnout.", + "similar_to_common": "Je to podobné jako u běžně používaného hesla.", + "simple_repeat": "Opakované znaky jako „aaa“ lze snadno uhodnout.", + "straight_row": "Rovné řady kláves na klávesnici lze snadno uhodnout.", + "top_hundred": "Toto je často používané heslo.", + "top_ten": "Toto je silně používané heslo.", + "user_inputs": "Neměly by existovat žádné osobní údaje ani údaje související se stránkami.", + "word_by_itself": "Jednotlivá slova lze snadno uhodnout." 
+ } + }, + "reset_cross_signing": { + "button": "Obnovit identitu", + "cancelled": { + "description_1": "Toto okno můžete zavřít a vrátit se do aplikace, abyste mohli pokračovat.", + "description_2": "Pokud jste se odhlásili všude a nepamatujete si svůj kód pro obnovení, budete muset obnovit svou identitu.", + "heading": "Obnovení identity zrušeno." + }, + "description": "Pokud nejste přihlášeni k žádnému jinému zařízení a ztratili jste klíč pro obnovení, budete muset obnovit svou identitu, abyste mohli aplikaci dále používat.", + "effect_list": { + "negative_1": "Ztratíte svou historii zpráv", + "negative_2": "Bude nutné znovu ověřit všechna stávající zařízení a kontakty.", + "neutral_1": "Ztratíte historii zpráv, která je uložena pouze na serveru", + "neutral_2": "Bude nutné znovu ověřit všechna stávající zařízení a kontakty.", + "positive_1": "Podrobnosti o vašem účtu, kontakty, předvolby a seznam chatu budou uchovávány" + }, + "failure": { + "description": "Může se jednat o dočasný problém, zkuste to prosím znovu později. Pokud problém přetrvává, obraťte se na správce serveru.", + "heading": "Nepodařilo se povolit obnovení kryptoidentity", + "title": "Nepodařilo se povolit krypto identitu" + }, + "finish_reset": "Dokončit obnovení", + "heading": "Obnovte svou identitu v případě, že nemůžete potvrdit jiným způsobem", + "start_reset": "Spustit obnovení", + "success": { + "description": "Obnovení identity bylo schváleno na další {{minutes}} minuty. Toto okno můžete zavřít a vrátit se do aplikace a pokračovat.", + "heading": "Identita byla úspěšně obnovena. Vraťte se do aplikace a dokončete proces.", + "title": "Obnovení krypto identity dočasně povoleno" + }, + "warning": "Identitu obnovujte pouze v případě, že nemáte přístup k jinému přihlášenému zařízení a ztratili jste klíč pro obnovení." 
+ }, + "selectable_session": { + "label": "Vybrat relaci" + }, + "session": { + "client_id_label": "ID klienta", + "current": "Aktuální", + "current_badge": "Aktuální", + "device_id_label": "ID zařízení", + "finished_date": "Dokončeno ", + "finished_label": "Dokončeno", + "generic_browser_session": "Relace prohlížeče", + "id_label": "ID", + "ip_label": "IP adresa", + "last_active_label": "Naposledy aktivní", + "last_auth_label": "Poslední ověření", + "name_for_platform": "{{name}} pro {{platform}}", + "scopes_label": "Rozsahy", + "set_device_name": { + "help": "Nastavte název, který vám pomůže identifikovat toto zařízení.", + "label": "Název zařízení", + "title": "Upravit název zařízení" + }, + "signed_in_date": "Přihlášen ", + "signed_in_label": "Přihlášen", + "title": "Podrobnosti o zařízení", + "unknown_browser": "Neznámý prohlížeč", + "unknown_device": "Neznámé zařízení", + "uri_label": "Uri", + "user_id_label": "ID uživatele", + "username_label": "Uživatelské jméno" + }, + "session_detail": { + "alert": { + "button": "Zpět", + "text": "Tato relace neexistuje nebo již není aktivní.", + "title": "Nelze najít relaci: {{deviceId}}" + } + }, + "unknown_route": "Neznámá trasa {{route}}", + "unverified_email_alert": { + "button": "Zkontrolujte a ověřte", + "text:one": "Máte {{count}} neověřenou e-mailovou adresu.", + "text:few": "Máte {{count}} neověřené e-mailové adresy.", + "text:other": "Máte {{count}} neověřených e-mailových adres.", + "title": "Neověřený e-mail" + }, + "user_email": { + "cant_delete_primary": "Zvolte jiný primární e-mail a odstraňte tento e-mail.", + "delete_button_confirmation_modal": { + "action": "Smazat e-mail", + "body": "Smazat tento e-mail?", + "incorrect_password": "Nesprávné heslo, zkuste to prosím znovu", + "password_confirmation": "Potvrďte heslo k účtu pro smazání této e-mailové adresy" + }, + "delete_button_title": "Odebrat e-mailovou adresu", + "email": "E-mail", + "make_primary_button": "Nastavit jako primární", + "not_verified": 
"Neověřeno", + "primary_email": "Primární e-mail", + "retry_button": "Znovu odeslat kód", + "unverified": "Neověřeno" + }, + "user_email_list": { + "heading": "E-maily", + "no_primary_email_alert": "Žádná primární e-mailová adresa" + }, + "user_greeting": { + "error": "Nepodařilo se načíst uživatele" + }, + "user_name": { + "display_name_field_label": "Zobrazované Jméno" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktivní relace", + "active_sessions:few": "{{count}} aktivní relace", + "active_sessions:other": "{{count}} aktivních relací", + "heading": "Kde jste přihlášeni", + "no_active_sessions": { + "default": "Nejste přihlášeni do žádné aplikace.", + "inactive_90_days": "Všechny vaše relace byly aktivní v posledních 90 dnech." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Platnost kódu vypršela. Vyžádejte si prosím nový kód.", + "title": "Platnost kódu vypršela" + }, + "code_field_error": "Kód nebyl rozpoznán", + "code_field_label": "6místný kód", + "code_field_wrong_shape": "Kód musí mít 6 číslic", + "email_sent_alert": { + "description": "Zadejte nový kód níže.", + "title": "Nový kód odeslán" + }, + "enter_code_prompt": "Zadejte šestimístný kód zaslaný na: {{email}}", + "heading": "Ověřte svůj e-mail", + "invalid_code_alert": { + "description": "Zkontrolujte kód zaslaný na váš e-mail a pokračujte aktualizací níže uvedených polí.", + "title": "Zadali jste špatný kód" + }, + "resend_code": "Znovu odeslat kód", + "resend_email": "Znovu odeslat e-mail", + "sent": "Odesláno!", + "unknown_email": "Neznámý e-mail" + } + }, + "mas": { + "scope": { + "edit_profile": "Upravte svůj profil a kontaktní údaje", + "manage_sessions": "Spravujte svá zařízení a relace", + "mas_admin": "Správa uživatelů (urn:mas:admin)", + "send_messages": "Odesílání nových zpráv vaším jménem", + "synapse_admin": "Spravovat server (urn:synapse:admin:*)", + "view_messages": "Zobrazte své stávající zprávy a data", + "view_profile": "Zobrazení 
informací o vašem profilu a kontaktních údajů" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/da.json b/matrix-authentication-service/frontend/locales/da.json new file mode 100644 index 00000000..0529125f --- /dev/null +++ b/matrix-authentication-service/frontend/locales/da.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Tilbage", + "cancel": "Annuller", + "clear": "Ryd", + "close": "Luk", + "collapse": "Fold sammen", + "confirm": "Bekræft", + "continue": "Fortsæt", + "edit": "Redigér", + "expand": "Udvid", + "save": "Gem", + "save_and_continue": "Gem og fortsæt", + "sign_out": "Log ud", + "start_over": "Begynd forfra" + }, + "branding": { + "privacy_policy": { + "alt": "Link til tjenestens privatlivspolitik", + "link": "Privatlivspolitik" + }, + "terms_and_conditions": { + "alt": "Link til servicevilkår", + "link": "Vilkår & Betingelser" + } + }, + "common": { + "add": "Tilføj", + "e2ee": "End-to-end kryptering", + "error": "Fejl", + "loading": "Indlæser...", + "next": "Næste", + "password": "Adgangskode", + "previous": "Forrige", + "saved": "Gemt", + "saving": "Gemmer..." + }, + "frontend": { + "account": { + "account_password": "Adgangskode til konto", + "contact_info": "Kontaktoplysninger", + "delete_account": { + "alert_description": "Denne konto vil blive slettet permanent, og du vil ikke længere have adgang til nogen af dine beskeder.", + "alert_title": "Du er ved at miste alle dine data", + "button": "Slet konto", + "dialog_description": "Bekræft, at du ønsker at slette din konto:\n\n\nDu vil ikke kunne genaktivere din konto\nDu vil ikke længere kunne logge ind\nIngen vil kunne genbruge dit brugernavn (MXID), heller ikke dig selv\nDu vil forlade alle rum og direkte beskeder, du er i\nDu vil blive fjernet fra identitetsserveren, og ingen vil kunne finde dig med din e-mail eller dit telefonnummer\n\nDine gamle beskeder vil stadig være synlige for folk, der har modtaget dem. 
Vil du gerne skjule dine sendte beskeder for folk, der tilmelder sig rum i fremtiden?", + "dialog_title": "Vil du slette denne konto?", + "erase_checkbox_label": "Ja, skjul alle mine beskeder fra nye deltagere", + "incorrect_password": "Forkert adgangskode, prøv venligst igen", + "mxid_label": "Bekræft dit Matrix ID ({{ mxid }})", + "mxid_mismatch": "Denne værdi stemmer ikke overens med dit Matrix-ID", + "password_label": "Indtast din adgangskode for at fortsætte" + }, + "edit_profile": { + "display_name_help": "Dette er, hvad andre vil se, uanset hvor du er logget ind.", + "display_name_label": "Visningsnavn", + "title": "Redigér profil", + "username_label": "Brugernavn" + }, + "password": { + "change": "Skift adgangskode", + "change_disabled": "Adgangskodeændringer er deaktiveret af administratoren.", + "label": "Adgangskode" + }, + "sign_out": { + "button": "Log ud af kontoen", + "dialog": "Vil du logge ud af denne konto?" + }, + "title": "Din konto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Den indtastede e-mail er ikke tilladt af serverpolitikken.", + "title": "E-mail nægtet af politik" + }, + "email_denied_error": "Den indtastede e-mail er ikke tilladt af serverpolitikken", + "email_exists_alert": { + "text": "Den indtastede e-mail er allerede føjet til denne konto", + "title": "E-mailen er allerede i brug" + }, + "email_exists_error": "Den indtastede e-mail er allerede føjet til denne konto", + "email_field_help": "Tilføj en alternativ e-mail, du kan bruge til at få adgang til denne konto.", + "email_field_label": "Tilføj e-mail", + "email_in_use_error": "Den indtastede e-mail er allerede i brug", + "email_invalid_alert": { + "text": "Den indtastede e-mail er ugyldig", + "title": "Ugyldig e-mail" + }, + "email_invalid_error": "Den indtastede e-mail er ugyldig", + "incorrect_password_error": "Forkert adgangskode, prøv venligst igen", + "password_confirmation": "Bekræft adgangskoden til din konto for at tilføje denne e-mail-adresse" + }, 
+ "app_sessions_list": { + "error": "Kunne ikke indlæse appsessioner", + "heading": "Apps" + }, + "browser_session_details": { + "current_badge": "Nuværende", + "session_details_title": "Session" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktiv session", + "body:other": "{{count}} aktive sessioner", + "heading": "Browsere", + "no_active_sessions": { + "default": "Du er ikke logget ind på nogen webbrowsere.", + "inactive_90_days": "Alle dine sessioner har været aktive i de sidste 90 dage." + }, + "view_all_button": "Se alle" + }, + "compat_session_detail": { + "client_details_title": "Klient-information", + "name": "Navn", + "session_details_title": "Session" + }, + "device_type_icon_label": { + "desktop": "Desktop", + "mobile": "Mobil", + "pc": "Computer", + "tablet": "Tablet", + "unknown": "Ukendt enhedstype", + "web": "Web" + }, + "email_in_use": { + "heading": "E-mailadressen {{email}} er allerede i brug." + }, + "end_session_button": { + "confirmation_modal_title": "Er du sikker på, at du vil afslutte denne session?", + "text": "Fjern enhed" + }, + "error": { + "hideDetails": "Skjul detaljer", + "showDetails": "Vis detaljer", + "subtitle": "Der opstod en uventet fejl. Prøv venligst igen.", + "title": "Noget gik galt" + }, + "error_boundary_title": "Noget gik galt", + "errors": { + "field_required": "Dette felt er påkrævet", + "rate_limit_exceeded": "Du har indsendt for mange anmodninger på kort tid. Vent et par minutter, og prøv igen." 
+ }, + "last_active": { + "active_date": "Senest aktiv {{relativeDate}}", + "active_now": "Aktiv nu", + "inactive_90_days": "Inaktiv i 90+ dage" + }, + "nav": { + "devices": "Enheder", + "plan": "Abonnementsordning", + "profile": "Profil", + "sessions": "Sessioner", + "settings": "Indstillinger" + }, + "not_found_alert_title": "Ikke fundet.", + "not_logged_in_alert": "Du er ikke logget ind.", + "oauth2_client_detail": { + "details_title": "Klient-information", + "id": "Klient ID", + "name": "Navn", + "policy": "Politik", + "terms": "Brugervilkår" + }, + "oauth2_session_detail": { + "client_details_name": "Navn", + "client_title": "Klient-information", + "session_details_title": "Session" + }, + "pagination_controls": { + "total": "I alt: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Nuværende adgangskode", + "failure": { + "description": { + "account_locked": "Din konto er låst og kan ikke gendannes på nuværende tidspunkt. Hvis dette er sket uventet, bedes du kontakte din serveradministrator.", + "expired_recovery_ticket": "Gendannelseslinket er udløbet. Start venligst kontogendannelsesprocessen igen fra begyndelsen.", + "invalid_new_password": "Den nye adgangskode, du valgte, er ugyldig. Muligvis fordi den ikke opfylder den konfigurerede sikkerhedspolitik.", + "no_current_password": "Du har ikke en aktuel adgangskode.", + "no_such_recovery_ticket": "Gendannelseslinket er ugyldigt. Hvis du har kopieret linket fra genoprettelses e-mailen, skal du kontrollere, at det fulde link blev kopieret.", + "password_changes_disabled": "Adgangskodeændringer er deaktiveret.", + "recovery_ticket_already_used": "Dette genoprettelseslink er allerede blevet brugt. Det kan ikke bruges igen.", + "unspecified": "Dette kan være et midlertidigt problem, så prøv igen senere. Hvis problemet fortsætter, skal du kontakte din serveradministrator.", + "wrong_password": "Den adgangskode, du har angivet som din nuværende, er forkert. Prøv venligst igen." 
+ }, + "title": "Kunne ikke opdatere adgangskoden" + }, + "new_password_again_label": "Indtast ny adgangskode igen", + "new_password_label": "Ny adgangskode", + "passwords_match": "Adgangskoderne matcher!", + "passwords_no_match": "Adgangskoderne stemmer ikke overens", + "subtitle": "Vælg en ny adgangskode til din konto.", + "success": { + "description": "Din adgangskode er blevet opdateret.", + "title": "Adgangskode opdateret" + }, + "title": "Skift din adgangskode" + }, + "password_reset": { + "consumed": { + "subtitle": "For at oprette en ny adgangskode skal du starte forfra og vælge \"Glemt adgangskode\".", + "title": "Dette link til at nulstille din adgangskode er allerede blevet brugt" + }, + "expired": { + "resend_email": "Gensend e-mail", + "subtitle": "Anmod om en ny e-mail, der vil blive sendt til: {{email}}", + "title": "Linket til nulstilling af din adgangskode er udløbet" + }, + "subtitle": "Vælg en ny adgangskode til din konto.", + "title": "Nulstil din adgangskode" + }, + "password_strength": { + "placeholder": "Adgangskodens styrke", + "score": { + "0": "Ekstremt svag adgangskode", + "1": "Meget svag adgangskode", + "2": "Svag adgangskode", + "3": "Stærk adgangskode", + "4": "Meget stærk adgangskode" + }, + "suggestion": { + "all_uppercase": "Brug stor skrift, men ikke til alle bogstaver.", + "another_word": "Tilføj flere ord, der er mindre almindelige.", + "associated_years": "Undgå år, der er forbundet med dig.", + "capitalization": "Skriv mere end det første bogstav med stort.", + "dates": "Undgå datoer og år, der er forbundet med dig.", + "l33t": "Undgå forudsigelige bogstavudskiftninger som '@' for 'a'.", + "longer_keyboard_pattern": "Brug længere tastaturmønstre, og skift skriveretning flere gange.", + "no_need": "Du kan oprette stærke adgangskoder uden at bruge symboler, tal eller store bogstaver.", + "pwned": "Hvis du bruger denne adgangskode et andet sted, bør du ændre den.", + "recent_years": "Undgå de seneste år.", + "repeated": "Undgå 
gentagne ord og tegn.", + "reverse_words": "Undgå omvendte stavemåder af almindelige ord.", + "sequences": "Undgå almindelige tegnsekvenser.", + "use_words": "Brug flere ord, men undgå almindelige sætninger." + }, + "too_weak": "Denne adgangskode er for svag", + "warning": { + "common": "Dette er en almindeligt brugt adgangskode.", + "common_names": "Almindelige navne og efternavne er lette at gætte.", + "dates": "Datoer er lette at gætte.", + "extended_repeat": "Gentagne karaktermønstre som \"abcabcabc\" er nemme at gætte.", + "key_pattern": "Korte tastaturmønstre er lette at gætte.", + "names_by_themselves": "Enkeltnavne eller efternavne er lette at gætte.", + "pwned": "Din adgangskode er blevet afsløret i et sikkerhedsbrud på Internettet.", + "recent_years": "De seneste år er nemme at gætte.", + "sequences": "Almindelige karaktersekvenser som \"abc\" er nemme at gætte.", + "similar_to_common": "Dette svarer til en almindeligt anvendt adgangskode.", + "simple_repeat": "Gentagne tegn som \"aaa\" er nemme at gætte.", + "straight_row": "Lige rækker af taster på tastaturet er lette at gætte.", + "top_hundred": "Dette er en hyppigt brugt adgangskode.", + "top_ten": "Dette er en meget brugt adgangskode.", + "user_inputs": "Der bør ikke være nogen personlige eller siderelaterede data.", + "word_by_itself": "Enkeltord er lette at gætte." + } + }, + "reset_cross_signing": { + "button": "Nulstil identitet", + "cancelled": { + "description_1": "Du kan nu lukke dette vindue og gå tilbage til appen for at fortsætte.", + "description_2": "Hvis du er logget ud overalt og ikke kan huske din gendannelseskode, er du stadig nødt til at nulstille din identitet.", + "heading": "Nulstilling af identitet annulleret." 
+ }, + "description": "Hvis du ikke er logget ind på andre enheder, og du har mistet din gendannelsesnøgle, skal du nulstille din identitet for at fortsætte med at bruge appen.", + "effect_list": { + "negative_1": "Du vil miste din eksisterende beskedhistorik", + "negative_2": "Du bliver nødt til at verificere alle dine eksisterende enheder og kontakter påny", + "neutral_1": "Du mister enhver meddelelseshistorik, der kun er gemt på serveren", + "neutral_2": "Du bliver nødt til at verificere alle dine eksisterende enheder og kontakter påny", + "positive_1": "Dine kontooplysninger, kontakter, præferencer og din chatliste gemmes" + }, + "failure": { + "description": "Dette kan være et midlertidigt problem, så prøv igen senere. Hvis problemet fortsætter, skal du kontakte din serveradministrator.", + "heading": "Kunne ikke tillade nulstilling af kryptoidentitet", + "title": "Det lykkedes ikke at tillade kryptoidentitet" + }, + "finish_reset": "Afslut nulstilling", + "heading": "Nulstil din identitet, hvis du ikke kan bekræfte på en anden måde", + "start_reset": "Start nulstilling", + "success": { + "description": "Nulstilling af identitet er blevet godkendt i de næste {{minutes}} minutter. Du kan lukke dette vindue og gå tilbage til appen for at fortsætte.", + "heading": "Identitetsnulstilling er gennemført. Gå tilbage til appen for at afslutte processen.", + "title": "Nulstilling af kryptoidentitet midlertidigt tilladt" + }, + "warning": "Nulstil kun din identitet, hvis du ikke har adgang til en anden enhed, der er logget ind, og du har mistet din gendannelsesnøgle." 
+ }, + "selectable_session": { + "label": "Vælg session" + }, + "session": { + "client_id_label": "Klient ID", + "current": "Nuværende", + "current_badge": "Nuværende", + "device_id_label": "Enheds-ID", + "finished_date": "Færdig ", + "finished_label": "Færdig", + "generic_browser_session": "Browser-session", + "id_label": "ID", + "ip_label": "IP-adresse", + "last_active_label": "Sidst aktiv", + "last_auth_label": "Sidste godkendelse", + "name_for_platform": "{{name}} til {{platform}}", + "scopes_label": "Omfang", + "set_device_name": { + "help": "Angiv et navn, der hjælper dig med at identificere denne enhed.", + "label": "Enhedens navn", + "title": "Rediger navnet på enheden" + }, + "signed_in_date": "Logget ind ", + "signed_in_label": "Logget ind", + "title": "Enhedsoplysninger", + "unknown_browser": "Ukendt browser", + "unknown_device": "Ukendt enhed", + "uri_label": "Type", + "user_id_label": "Bruger-ID", + "username_label": "Brugernavn" + }, + "session_detail": { + "alert": { + "button": "Gå tilbage", + "text": "Denne session eksisterer ikke eller er ikke længere aktiv.", + "title": "Kan ikke finde session: {{deviceId}}" + } + }, + "unknown_route": "Ukendt rute {{route}}", + "unverified_email_alert": { + "button": "Kontroller og verificer", + "text:one": "Du har {{count}} ubekræftet e-mailadresse.", + "text:other": "Du har {{count}} ubekræftede e-mailadresser.", + "title": "Ubekræftet e-mail" + }, + "user_email": { + "cant_delete_primary": "Vælg en anden primær e-mail for at slette denne.", + "delete_button_confirmation_modal": { + "action": "Slet e-mail", + "body": "Slet denne e-mail?", + "incorrect_password": "Forkert adgangskode, prøv venligst igen", + "password_confirmation": "Bekræft adgangskoden til din konto for at slette denne e-mail-adresse" + }, + "delete_button_title": "Fjern e-mailadresse", + "email": "E-mail", + "make_primary_button": "Gør til primær", + "not_verified": "Ikke verificeret", + "primary_email": "Primær e-mail", + "retry_button": 
"Send koden igen", + "unverified": "Uverificeret" + }, + "user_email_list": { + "heading": "E-mails", + "no_primary_email_alert": "Ingen primær e-mailadresse" + }, + "user_greeting": { + "error": "Kunne ikke indlæse brugeren" + }, + "user_name": { + "display_name_field_label": "Vist navn" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktiv session", + "active_sessions:other": "{{count}} aktive sessioner", + "heading": "Hvor du er logget ind", + "no_active_sessions": { + "default": "Du er ikke logget ind på nogen applikation.", + "inactive_90_days": "Alle dine sessioner har været aktive i de sidste 90 dage." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Koden er udløbet. Anmod om en ny kode.", + "title": "Koden er udløbet" + }, + "code_field_error": "Kode ikke genkendt", + "code_field_label": "6-cifret kode", + "code_field_wrong_shape": "Koden skal være 6 cifre", + "email_sent_alert": { + "description": "Indtast den nye kode herunder.", + "title": "Ny kode sendt" + }, + "enter_code_prompt": "Indtast den 6-cifrede kode, der er sendt til: {{email}} ", + "heading": "Bekræft din e-mail", + "invalid_code_alert": { + "description": "Kontroller koden, der er sendt til din e-mail, og opdater felterne herunder for at fortsætte.", + "title": "Du har indtastet en forkert kode" + }, + "resend_code": "Send koden igen", + "resend_email": "Gensend e-mail", + "sent": "Sendt!", + "unknown_email": "Ukendt e-mail" + } + }, + "mas": { + "scope": { + "edit_profile": "Rediger din profil og dine kontaktoplysninger", + "manage_sessions": "Administrer dine enheder og sessioner", + "mas_admin": "Administrer enhver bruger på matrix-authentication-service", + "send_messages": "Send nye beskeder på dine vegne", + "synapse_admin": "Administrer Synapse hjemmeserveren", + "view_messages": "Se dine eksisterende beskeder og data", + "view_profile": "Se dine profiloplysninger og kontaktoplysninger" + } + } +} \ No newline at end of file diff --git 
a/matrix-authentication-service/frontend/locales/de.json b/matrix-authentication-service/frontend/locales/de.json new file mode 100644 index 00000000..7a78e7c1 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/de.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Zurück", + "cancel": "Abbrechen", + "clear": "Löschen", + "close": "Schließen", + "collapse": "Einklappen", + "confirm": "Bestätigen", + "continue": "Weiter", + "edit": "Bearbeiten", + "expand": "Erweitern", + "save": "Speichern", + "save_and_continue": "Speichern und fortfahren", + "sign_out": "Abmelden", + "start_over": "Von vorne anfangen" + }, + "branding": { + "privacy_policy": { + "alt": "Link zur Datenschutzerklärung des Dienstes", + "link": "Datenschutzerklärung" + }, + "terms_and_conditions": { + "alt": "Link zu den Allgemeinen Geschäftsbedingungen des Dienstes", + "link": "Allgemeine Geschäftsbedingungen" + } + }, + "common": { + "add": "Hinzufügen", + "e2ee": "Ende-zu-Ende-Verschlüsselung", + "error": "Fehler", + "loading": "Lade …", + "next": "Weiter", + "password": "Passwort", + "previous": "Zurück", + "saved": "Gespeichert", + "saving": "Speichern..." + }, + "frontend": { + "account": { + "account_password": "Kontokennwort", + "contact_info": "Kontaktinformation", + "delete_account": { + "alert_description": "Dieses Konto wird dauerhaft entfernt und du hast keinen Zugriff mehr auf deine Nachrichten.", + "alert_title": "Du bist kurz davor, alle deine Daten zu verlieren.", + "button": "Account löschen", + "dialog_description": "Bestätige, dass du dein Konto löschen möchtest:\n\n\nDu kannst dein Konto nicht reaktivieren\nDu kannst dich nicht mehr anmelden\nNiemand kann deinen Benutzernamen (MXID) wieder verwenden, auch du nicht.\nDu verlässt alle Gruppen und Chats\nDu wirst vom Identitätsserver entfernt und niemand kann dich mit deiner E-Mail-Adresse oder Telefonnummer finden\n\nDeine alten Nachrichten sind für die jeweiligen Empfänger weiterhin sichtbar. 
Möchtest du deine gesendeten Nachrichten vor zukünftigen Gruppen-Besuchern verbergen?", + "dialog_title": "Dieses Konto löschen?", + "erase_checkbox_label": "Ja, alle meine Nachrichten vor neuen Mitgliedern verbergen", + "incorrect_password": "Falsches Passwort, versuch's nochmal", + "mxid_label": "Bestätige deine Matrix-ID ({{ mxid }})", + "mxid_mismatch": "Dieser Wert passt nicht zu deiner Matrix-ID.", + "password_label": "Gib dein Passwort ein, um weiterzumachen" + }, + "edit_profile": { + "display_name_help": "Dies ist der öffentliche Nutzername.", + "display_name_label": "Anzeigename", + "title": "Profil bearbeiten", + "username_label": "Benutzername" + }, + "password": { + "change": "Passwort ändern", + "change_disabled": "Passwortänderungen wurden vom Administrator deaktiviert.", + "label": "Passwort" + }, + "sign_out": { + "button": "Vom Konto abmelden", + "dialog": "Von diesem Konto abmelden?" + }, + "title": "Dein Konto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Die eingegebene E-Mail-Adresse wird von der Serverrichtlinie nicht zugelassen.", + "title": "E-Mail-Adresse durch Richtlinie abgelehnt" + }, + "email_denied_error": "Die eingegebene E-Mail wird von der Serverrichtlinie nicht zugelassen.", + "email_exists_alert": { + "text": "Die eingegebene E-Mail-Adresse ist diesem Konto bereits zugeordnet", + "title": "Diese E-Mailadresse existiert bereits" + }, + "email_exists_error": "Die eingegebene E-Mail-Adresse ist diesem Konto bereits zugeordnet", + "email_field_help": "Gib eine alternative E-Mail-Adresse an, mit der du auf dieses Konto zugreifen kannst.", + "email_field_label": "E-Mail-Adresse hinzufügen", + "email_in_use_error": "Die eingegebene E-Mail wird bereits verwendet", + "email_invalid_alert": { + "text": "Die eingegebene E-Mail-Adresse ist ungültig", + "title": "Ungültige Email-Adresse" + }, + "email_invalid_error": "Die eingegebene E-Mail-Adresse ist ungültig", + "incorrect_password_error": "Falsches Passwort, versuch's 
nochmal", + "password_confirmation": "Bestätige dein Passwort, um diese E-Mail-Adresse hinzuzufügen." + }, + "app_sessions_list": { + "error": "App-Sitzungen konnten nicht geladen werden", + "heading": "Anwendungen" + }, + "browser_session_details": { + "current_badge": "Aktuell", + "session_details_title": "Sitzung" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktive Sitzung", + "body:other": "{{count}} aktive Sitzungen", + "heading": "Browser", + "no_active_sessions": { + "default": "Du bist in keinem Webbrowser angemeldet.", + "inactive_90_days": "Alle deine Sitzungen waren in den letzten 90 Tagen aktiv." + }, + "view_all_button": "Alle anzeigen" + }, + "compat_session_detail": { + "client_details_title": "Geräte Information", + "name": "Name", + "session_details_title": "Sitzung" + }, + "device_type_icon_label": { + "desktop": "Desktop", + "mobile": "Mobil", + "pc": "Computer", + "tablet": "Tablet", + "unknown": "Unbekannter Gerätetyp", + "web": "Web" + }, + "email_in_use": { + "heading": "Die E-Mail-Adresse {{email}} wird bereits verwendet." + }, + "end_session_button": { + "confirmation_modal_title": "Möchtest du diese Sitzung wirklich beenden?", + "text": "Gerät entfernen" + }, + "error": { + "hideDetails": "Details ausblenden", + "showDetails": "Details anzeigen", + "subtitle": "Ein unerwarteter Fehler ist aufgetreten, bitte versuch's nochmal.", + "title": "Etwas ist schief gelaufen" + }, + "error_boundary_title": "Etwas ist schief gelaufen", + "errors": { + "field_required": "Dieses Feld ist ein Pflichtfeld", + "rate_limit_exceeded": "Du hast in kurzer Zeit zu viele Anfragen gestellt. Warte bitte ein paar Minuten und versuch's nochmal." 
+ }, + "last_active": { + "active_date": "Aktiv {{relativeDate}}", + "active_now": "Jetzt aktiv", + "inactive_90_days": "Seit über 90 Tagen inaktiv" + }, + "nav": { + "devices": "Geräte", + "plan": "Abo", + "profile": "Profil", + "sessions": "Sitzungen", + "settings": "Einstellungen" + }, + "not_found_alert_title": "Nicht gefunden.", + "not_logged_in_alert": "Du bist nicht angemeldet.", + "oauth2_client_detail": { + "details_title": "Geräte Information", + "id": "Client-ID", + "name": "Name", + "policy": "Richtlinie", + "terms": "Nutzungsbedingungen" + }, + "oauth2_session_detail": { + "client_details_name": "Name", + "client_title": "Geräte Information", + "session_details_title": "Sitzung" + }, + "pagination_controls": { + "total": "Gesamt: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Aktuelles Passwort", + "failure": { + "description": { + "account_locked": "Dein Konto ist gesperrt und kann im Moment nicht wiederhergestellt werden. Wenn du das nicht erwartet hast, wende dich bitte an deinen Server-Admin.", + "expired_recovery_ticket": "Der Link zur Kontowiederherstellung ist abgelaufen. Bitte fang den Prozess noch mal von vorne an.", + "invalid_new_password": "Das neue Passwort, das du gewählt hast, ist ungültig; es entspricht möglicherweise nicht den Sicherheitsrichtlinien.", + "no_current_password": "Du hast kein aktuelles Passwort.", + "no_such_recovery_ticket": "Der Link zum Wiederherstellen ist nicht gültig. Wenn du den Link aus der E-Mail zum Wiederherstellen kopiert hast, schau bitte nach, ob du den vollständigen Link kopiert hast.", + "password_changes_disabled": "Passwortänderungen sind deaktiviert.", + "recovery_ticket_already_used": "Der Wiederherstellungslink wurde bereits verwendet. Er kann nicht erneut verwendet werden.", + "unspecified": "Das könnte ein vorübergehendes Problem sein, also versuch's später nochmal. 
Wenn das Problem weiterhin besteht, wende dich bitte an deinen Server-Admin.", + "wrong_password": "Das Passwort, das du als dein aktuelles Passwort angegeben hast, ist falsch. Versuch's bitte nochmal." + }, + "title": "Aktualisierung des Passworts fehlgeschlagen" + }, + "new_password_again_label": "Neues Passwort erneut eingeben", + "new_password_label": "Neues Passwort", + "passwords_match": "Passwörter stimmen überein!", + "passwords_no_match": "Passwörter stimmen nicht überein", + "subtitle": "Such dir ein neues Passwort für dein Konto aus.", + "success": { + "description": "Dein Passwort wurde geändert.", + "title": "Passwort geändert" + }, + "title": "Ändere dein Passwort" + }, + "password_reset": { + "consumed": { + "subtitle": "Um ein neues Passwort zu erstellen, fang einfach von vorne an und wähle „Passwort vergessen“.", + "title": "Der Link zum Zurücksetzen deines Passworts wurde bereits verwendet" + }, + "expired": { + "resend_email": "E-Mail erneut senden", + "subtitle": "Eine neue E-Mail anfordern, die an folgende Adresse gesendet wird: {{email}}", + "title": "Der Link zum Zurücksetzen deines Passworts ist abgelaufen" + }, + "subtitle": "Such dir ein neues Passwort für dein Konto aus.", + "title": "Setze dein Passwort zurück" + }, + "password_strength": { + "placeholder": "Passwortstärke", + "score": { + "0": "Extrem schwaches Passwort", + "1": "Sehr schwaches Passwort", + "2": "Schwaches Passwort", + "3": "Starkes Passwort", + "4": "Sehr starkes Passwort" + }, + "suggestion": { + "all_uppercase": "Schreib ein paar Buchstaben groß, aber nicht alle.", + "another_word": "Füge mehr Wörter hinzu, die weniger gebräuchlich sind.", + "associated_years": "Vermeide Jahre, die mit dir in Verbindung stehen.", + "capitalization": "Schreib mehr als nur den ersten Buchstaben groß.", + "dates": "Vermeide Daten und Jahreszahlen, die mit dir in Verbindung stehen.", + "l33t": "Vermeide vorhersehbare Buchstabenersetzungen wie „@“ für „a“.", + "longer_keyboard_pattern": 
"Benutz längere Tastaturmuster und wechsel mehrmals die Schreibrichtung.", + "no_need": "Du kannst starke Passwörter erstellen, ohne Symbole, Zahlen oder Großbuchstaben zu benutzen.", + "pwned": "Wenn du dieses Passwort auch woanders benutzt, solltest du es ändern.", + "recent_years": "Vermeide die letzten Jahre.", + "repeated": "Vermeide es, Wörter und Zeichen zu wiederholen.", + "reverse_words": "Vermeide es, gängige Wörter rückwärts zu schreiben.", + "sequences": "Vermeide gängige Zeichenfolgen.", + "use_words": "Benutze mehrere Wörter, aber vermeide gängige Redewendungen." + }, + "too_weak": "Dieses Passwort ist zu schwach", + "warning": { + "common": "Dies ist ein häufig verwendetes Passwort.", + "common_names": "Gebräuchliche Vor- und Nachnamen sind leicht zu erraten.", + "dates": "Daten sind leicht zu erraten.", + "extended_repeat": "Wiederholte Zeichenmuster wie „abcabcabc“ sind leicht zu erraten.", + "key_pattern": "Kurze Eingaben sind leicht zu erraten.", + "names_by_themselves": "Einzelne Vor- oder Nachnamen sind leicht zu erraten.", + "pwned": "Dein Passwort wurde durch eine Datenpanne im Internet preisgegeben.", + "recent_years": "Die letzten Jahre sind leicht zu erraten.", + "sequences": "Gängige Zeichenfolgen wie „abc“ sind leicht zu erraten.", + "similar_to_common": "Dies ähnelt einem häufig verwendeten Passwort.", + "simple_repeat": "Sich wiederholende Zeichen wie \"aaa\" sind leicht zu erraten.", + "straight_row": "Gerade Reihen von Tasten auf deiner Tastatur sind leicht zu erraten.", + "top_hundred": "Dies ist ein häufig verwendetes Passwort.", + "top_ten": "Dies ist ein häufig verwendetes Passwort.", + "user_inputs": "Es sollten keine persönlichen oder seitenbezogenen Daten vorhanden sein.", + "word_by_itself": "Einzelne Wörter sind leicht zu erraten." 
+ } + }, + "reset_cross_signing": { + "button": "Identität zurücksetzen", + "cancelled": { + "description_1": "Du kannst dieses Fenster schließen und zur App zurückgehen, um weiterzumachen.", + "description_2": "Wenn du dich überall abgemeldet hast und deinen Wiederherstellungs-Schlüssel nicht mehr weißt, musst du deine Identität zurücksetzen.", + "heading": "Identitätszurücksetzung abgebrochen." + }, + "description": "Wenn du auf keinem anderen Gerät angemeldet bist und deinen Wiederherstellungs-Schlüssel verloren hast, musst du deine Identität zurücksetzen, um die App weiter nutzen zu können.", + "effect_list": { + "negative_1": "Du verlierst deine bestehenden Chats.", + "negative_2": "Du musst alle deine Geräte und Kontakte nochmal verifizieren.", + "neutral_1": "Du verlierst alle Nachrichten, die nur auf dem Server gespeichert sind.", + "neutral_2": "Du musst alle deine Geräte und Kontakte nochmal verifizieren.", + "positive_1": "Deine Kontodaten, Kontakte, Einstellungen und Chat-Liste bleiben erhalten." + }, + "failure": { + "description": "Das könnte ein vorübergehendes Problem sein, also versuch's später nochmal. Wenn das Problem weiterhin besteht, wende dich bitte an deinen Server-Admin.", + "heading": "Zurücksetzen der Krypto-Identität konnte nicht zugelassen werden", + "title": "Krypto-Identität konnte nicht zugelassen werden" + }, + "finish_reset": "Reset beenden", + "heading": "Erstelle eine neue Identität, solltest du sie nicht auf andere Weise bestätigen können.", + "start_reset": "Reset starten", + "success": { + "description": "Das Zurücksetzen der Identität wurde für die nächsten {{minutes}} Minuten genehmigt. Du kannst dieses Fenster schließen und zur App zurückkehren, um fortzufahren.", + "heading": "Identität erfolgreich zurückgesetzt. 
Geh zurück zur App, um den Vorgang abzuschließen.", + "title": "Das Zurücksetzen der Krypto-Identität ist vorübergehend erlaubt" + }, + "warning": "Setze deine Identität nur zurück, wenn du keinen Zugriff auf ein anderes angemeldetes Gerät hast und deinen Wiederherstellungsschlüssel verloren hast." + }, + "selectable_session": { + "label": "Sitzung auswählen" + }, + "session": { + "client_id_label": "Client-ID", + "current": "Aktuell", + "current_badge": "Aktuell", + "device_id_label": "Geräte-ID", + "finished_date": "Beendet ", + "finished_label": "Fertig", + "generic_browser_session": "Browser-Sitzung", + "id_label": "ID", + "ip_label": "IP-Adresse", + "last_active_label": "Zuletzt aktiv", + "last_auth_label": "Letzte Anmeldung", + "name_for_platform": "{{name}} für {{platform}}", + "scopes_label": "Berechtigungsumfang", + "set_device_name": { + "help": "Gib einen Namen ein, mit dem du dieses Gerät leicht wiederfindest.", + "label": "Gerätename", + "title": "Gerätename bearbeiten" + }, + "signed_in_date": "Angemeldet ", + "signed_in_label": "Angemeldet", + "title": "Geräte Details", + "unknown_browser": "Unbekannter Browser", + "unknown_device": "Unbekanntes Gerät", + "uri_label": "URI", + "user_id_label": "Benutzer-ID", + "username_label": "Benutzername" + }, + "session_detail": { + "alert": { + "button": "Zurück", + "text": "Diese Sitzung existiert nicht oder ist nicht mehr aktiv.", + "title": "Sitzung kann nicht gefunden werden: {{deviceId}}" + } + }, + "unknown_route": "Unbekannte Route {{route}}", + "unverified_email_alert": { + "button": "Überprüfen und verifizieren", + "text:one": "Du hast {{count}} unverifizierte E-Mail-Adresse.", + "text:other": "Du hast {{count}} unverifizierte E-Mail-Adressen.", + "title": "Nicht verifizierte E-Mail-Adresse" + }, + "user_email": { + "cant_delete_primary": "Wähle eine andere primäre E-Mail-Adresse aus, um diese zu löschen.", + "delete_button_confirmation_modal": { + "action": "E-Mail löschen", + "body": "Diese E-Mail 
löschen?", + "incorrect_password": "Falsches Passwort, versuch's nochmal", + "password_confirmation": "Bestätige dein Passwort, um diese E-Mail-Adresse zu löschen." + }, + "delete_button_title": "E-Mail-Adresse entfernen", + "email": "E-Mail", + "make_primary_button": "Als primäre Adresse festlegen", + "not_verified": "Nicht verifiziert", + "primary_email": "Primäre E-Mail-Adresse", + "retry_button": "Code erneut senden", + "unverified": "Nicht verifiziert" + }, + "user_email_list": { + "heading": "E-Mails", + "no_primary_email_alert": "Keine primäre E-Mail-Adresse" + }, + "user_greeting": { + "error": "Fehler beim Laden des Benutzers" + }, + "user_name": { + "display_name_field_label": "Anzeigename" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktive Sitzung", + "active_sessions:other": "{{count}} aktive Sitzungen", + "heading": "Wo du angemeldet bist", + "no_active_sessions": { + "default": "Du bist bei keiner Anwendung angemeldet.", + "inactive_90_days": "Alle deine Sitzungen waren in den letzten 90 Tagen aktiv." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Der Code ist abgelaufen. Bitte fordere einen neuen Code an.", + "title": "Code abgelaufen" + }, + "code_field_error": "Code nicht erkannt", + "code_field_label": "6-stelliger Code", + "code_field_wrong_shape": "Der Code muss 6-stellig sein", + "email_sent_alert": { + "description": "Gib den neuen Code unten ein.", + "title": "Neuer Code gesendet" + }, + "enter_code_prompt": "Gib den 6-stelligen Code ein, der an {{email}} gesendet wurde", + "heading": "Bestätige deine E-Mail", + "invalid_code_alert": { + "description": "Überprüfe den Code, der an deine E-Mail-Adresse gesendet wurde, und aktualisiere die folgenden Felder, um fortzufahren.", + "title": "Du hast den falschen Code eingegeben." 
+ }, + "resend_code": "Code erneut senden", + "resend_email": "E-Mail erneut senden", + "sent": "Gesendet!", + "unknown_email": "Unbekannte E-Mail-Adresse" + } + }, + "mas": { + "scope": { + "edit_profile": "Bearbeite dein Profil und deine Kontaktdaten", + "manage_sessions": "Verwalte deine Geräte und Sitzungen", + "mas_admin": "Beliebige Benutzer verwalten (urn:mas:admin)", + "send_messages": "Neue Nachrichten in deinem Namen senden", + "synapse_admin": "Den Synapse-Homeserver verwalten (urn:synapse:admin:*)", + "view_messages": "Zeig deine vorhandenen Nachrichten und Daten an", + "view_profile": "Deine Profilinfos und Kontaktdaten anzeigen" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/en.json b/matrix-authentication-service/frontend/locales/en.json new file mode 100644 index 00000000..f3028c15 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/en.json @@ -0,0 +1,329 @@ +{ + "action": { + "back": "Back", + "cancel": "Cancel", + "clear": "Clear", + "close": "Close", + "collapse": "Collapse", + "confirm": "Confirm", + "continue": "Continue", + "edit": "Edit", + "expand": "Expand", + "save": "Save", + "save_and_continue": "Save and continue", + "sign_out": "Sign out", + "start_over": "Start over" + }, + "branding": { + "privacy_policy": { + "alt": "Link to the service privacy policy", + "link": "Privacy Policy" + }, + "terms_and_conditions": { + "alt": "Link to the service terms and conditions", + "link": "Terms & Conditions" + } + }, + "common": { + "e2ee": "End-to-end encryption", + "loading": "Loading…", + "next": "Next", + "password": "Password", + "previous": "Previous", + "saved": "Saved", + "saving": "Saving…" + }, + "frontend": { + "account": { + "account_password": "Account password", + "contact_info": "Contact info", + "delete_account": { + "alert_description": "This account will be permanently erased and you’ll no longer have access to any of your messages.", + "alert_title": "You’re 
about to lose all of your data", + "button": "Delete account", + "dialog_description": "Confirm that you would like to delete your account:\n\n\nYou will not be able to reactivate your account\nYou will no longer be able to sign in\nNo one will be able to reuse your username (MXID), including you\nYou will leave all rooms and direct messages you are in\nYou will be removed from the identity server, and no one will be able to find you with your email or phone number\n\nYour old messages will still be visible to people who received them. Would you like to hide your sent messages from people who join rooms in the future?", + "dialog_title": "Delete this account?", + "erase_checkbox_label": "Yes, hide all my messages from new joiners", + "incorrect_password": "Incorrect password, please try again", + "mxid_label": "Confirm your Matrix ID ({{ mxid }})", + "mxid_mismatch": "This value does not match your Matrix ID", + "password_label": "Enter your password to continue" + }, + "edit_profile": { + "display_name_help": "This is what others will see wherever you’re signed in.", + "display_name_label": "Display name", + "title": "Edit profile", + "username_label": "Username" + }, + "password": { + "change": "Change password", + "change_disabled": "Password changes are disabled by the administrator.", + "label": "Password" + }, + "sign_out": { + "button": "Sign out of account", + "dialog": "Sign out of this account?" 
+ }, + "title": "Your account" + }, + "add_email_form": { + "email_denied_error": "The entered email is not allowed by the server policy", + "email_field_help": "Add an alternative email you can use to access this account.", + "email_field_label": "Add email", + "email_in_use_error": "The entered email is already in use", + "email_invalid_error": "The entered email is invalid", + "incorrect_password_error": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to add this email address" + }, + "browser_session_details": { + "current_badge": "Current" + }, + "browser_sessions_overview": { + "body:one": "{{count}} active session", + "body:other": "{{count}} active sessions", + "heading": "Browsers", + "no_active_sessions": { + "default": "You are not signed in to any web browsers.", + "inactive_90_days": "All your sessions have been active in the last 90 days." + }, + "view_all_button": "View all" + }, + "compat_session_detail": { + "client_details_title": "Client info", + "name": "Name" + }, + "device_type_icon_label": { + "mobile": "Mobile", + "pc": "Computer", + "tablet": "Tablet", + "unknown": "Unknown device type" + }, + "email_in_use": { + "heading": "The email address {{email}} is already in use." + }, + "end_session_button": { + "confirmation_modal_title": "Are you sure you want to end this session?", + "text": "Remove device" + }, + "error": { + "hideDetails": "Hide details", + "showDetails": "Show details", + "subtitle": "An unexpected error occurred. Please try again.", + "title": "Something went wrong" + }, + "errors": { + "field_required": "This field is required", + "rate_limit_exceeded": "You've made too many requests in a short period. Please wait a few minutes and try again." 
+ }, + "last_active": { + "active_date": "Active {{relativeDate}}", + "active_now": "Active now", + "inactive_90_days": "Inactive for 90+ days" + }, + "nav": { + "devices": "Devices", + "plan": "Plan", + "settings": "Settings" + }, + "not_found_alert_title": "Not found.", + "oauth2_client_detail": { + "details_title": "Client info", + "name": "Name", + "policy": "Policy", + "terms": "Terms of service" + }, + "oauth2_session_detail": { + "client_details_name": "Name", + "client_title": "Client info" + }, + "pagination_controls": { + "total": "Total: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Current password", + "failure": { + "description": { + "account_locked": "Your account is locked and can not be recovered at this time. If this is not expected, please contact your server administrator.", + "expired_recovery_ticket": "The recovery link has expired. Please start the account recovery process again from the start.", + "invalid_new_password": "The new password you chose is invalid; it may not meet the configured security policy.", + "no_current_password": "You don't have a current password.", + "no_such_recovery_ticket": "The recovery link is invalid. If you copied the link from the recovery e-mail, please check the full link was copied.", + "password_changes_disabled": "Password changes are disabled.", + "recovery_ticket_already_used": "The recovery link has already been used. It cannot be used again.", + "unspecified": "This might be a temporary problem, so please try again later. If the problem persists, please contact your server administrator.", + "wrong_password": "The password you supplied as your current password is incorrect. Please try again." 
+ }, + "title": "Failed to update password" + }, + "new_password_again_label": "Enter new password again", + "new_password_label": "New password", + "passwords_match": "Passwords match!", + "passwords_no_match": "Passwords don't match", + "subtitle": "Choose a new password for your account.", + "success": { + "description": "Your password has been updated successfully.", + "title": "Password updated" + }, + "title": "Change your password" + }, + "password_reset": { + "consumed": { + "subtitle": "To create a new password, start over and select “Forgot password”.", + "title": "The link to reset your password has already been used" + }, + "expired": { + "resend_email": "Resend email", + "subtitle": "Request a new email that will be sent to: {{email}}", + "title": "The link to reset your password has expired" + }, + "subtitle": "Choose a new password for your account.", + "title": "Reset your password" + }, + "password_strength": { + "placeholder": "Password strength", + "score": { + "0": "Extremely weak password", + "1": "Very weak password", + "2": "Weak password", + "3": "Strong password", + "4": "Very strong password" + }, + "suggestion": { + "all_uppercase": "Capitalise some, but not all letters.", + "another_word": "Add more words that are less common.", + "associated_years": "Avoid years that are associated with you.", + "capitalization": "Capitalise more than the first letter.", + "dates": "Avoid dates and years that are associated with you.", + "l33t": "Avoid predictable letter substitutions like '@' for 'a'.", + "longer_keyboard_pattern": "Use longer keyboard patterns and change typing direction multiple times.", + "no_need": "You can create strong passwords without using symbols, numbers, or uppercase letters.", + "pwned": "If you use this password elsewhere, you should change it.", + "recent_years": "Avoid recent years.", + "repeated": "Avoid repeated words and characters.", + "reverse_words": "Avoid reversed spellings of common words.", + "sequences": 
"Avoid common character sequences.", + "use_words": "Use multiple words, but avoid common phrases." + }, + "too_weak": "This password is too weak", + "warning": { + "common": "This is a commonly used password.", + "common_names": "Common names and surnames are easy to guess.", + "dates": "Dates are easy to guess.", + "extended_repeat": "Repeated character patterns like \"abcabcabc\" are easy to guess.", + "key_pattern": "Short keyboard patterns are easy to guess.", + "names_by_themselves": "Single names or surnames are easy to guess.", + "pwned": "Your password was exposed by a data breach on the Internet.", + "recent_years": "Recent years are easy to guess.", + "sequences": "Common character sequences like \"abc\" are easy to guess.", + "similar_to_common": "This is similar to a commonly used password.", + "simple_repeat": "Repeated characters like \"aaa\" are easy to guess.", + "straight_row": "Straight rows of keys on your keyboard are easy to guess.", + "top_hundred": "This is a frequently used password.", + "top_ten": "This is a heavily used password.", + "user_inputs": "There should not be any personal or page related data.", + "word_by_itself": "Single words are easy to guess." + } + }, + "reset_cross_signing": { + "cancelled": { + "description_1": "You can close this window and go back to the app to continue.", + "description_2": "If you're signed out everywhere and don't remember your recovery code, you'll still need to reset your identity.", + "heading": "Identity reset cancelled." 
+ }, + "description": "If you're not signed in to any other devices and you've lost your recovery key, then you'll need to reset your identity to continue using the app.", + "effect_list": { + "neutral_1": "You will lose any message history that's stored only on the server", + "neutral_2": "You will need to verify all your existing devices and contacts again", + "positive_1": "Your account details, contacts, preferences, and chat list will be kept" + }, + "failure": { + "description": "This might be a temporary problem, so please try again later. If the problem persists, please contact your server administrator.", + "heading": "Failed to allow crypto identity reset" + }, + "finish_reset": "Finish reset", + "heading": "Reset your identity in case you can't confirm another way", + "start_reset": "Start reset", + "success": { + "description": "The identity reset has been approved for the next {{minutes}} minutes. You can close this window and go back to the app to continue.", + "heading": "Identity reset successfully. Go back to the app to finish the process." + }, + "warning": "Only reset your identity if you don't have access to another signed-in device and you've lost your recovery key." 
+ }, + "session": { + "client_id_label": "Client ID", + "current": "Current", + "device_id_label": "Device ID", + "finished_label": "Finished", + "generic_browser_session": "Browser session", + "ip_label": "IP Address", + "last_active_label": "Last Active", + "name_for_platform": "{{name}} for {{platform}}", + "scopes_label": "Scopes", + "set_device_name": { + "help": "Set a name that will help you identify this device.", + "label": "Device name", + "title": "Edit device name" + }, + "signed_in_label": "Signed in", + "title": "Device details", + "unknown_browser": "Unknown browser", + "unknown_device": "Unknown device", + "uri_label": "Uri" + }, + "session_detail": { + "alert": { + "button": "Go back", + "text": "This session does not exist, or is no longer active.", + "title": "Cannot find session: {{deviceId}}" + } + }, + "user_email": { + "delete_button_confirmation_modal": { + "action": "Delete email", + "body": "Delete this email?", + "incorrect_password": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to delete this email address" + }, + "delete_button_title": "Remove email address", + "email": "Email" + }, + "user_sessions_overview": { + "heading": "Where you're signed in", + "no_active_sessions": { + "default": "You are not signed in to any application.", + "inactive_90_days": "All your sessions have been active in the last 90 days." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "The code has expired. 
Please request a new code.", + "title": "Code expired" + }, + "code_field_error": "Code not recognised", + "code_field_label": "6-digit code", + "code_field_wrong_shape": "Code must be 6 digits", + "email_sent_alert": { + "description": "Enter the new code below.", + "title": "New code sent" + }, + "enter_code_prompt": "Enter the 6-digit code sent to: {{email}}", + "heading": "Verify your email", + "invalid_code_alert": { + "description": "Check the code sent to your email and update the fields below to continue.", + "title": "You entered the wrong code" + }, + "resend_code": "Resend code" + } + }, + "mas": { + "scope": { + "edit_profile": "Edit your profile and contact details", + "manage_sessions": "Manage your devices and sessions", + "mas_admin": "Manage users (urn:mas:admin)", + "send_messages": "Send new messages on your behalf", + "synapse_admin": "Administer the server (urn:synapse:admin:*)", + "view_messages": "View your existing messages and data", + "view_profile": "See your profile info and contact details" + } + } +} diff --git a/matrix-authentication-service/frontend/locales/et.json b/matrix-authentication-service/frontend/locales/et.json new file mode 100644 index 00000000..e9f90b09 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/et.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Tagasi", + "cancel": "Katkesta", + "clear": "Tühjenda", + "close": "Sulge", + "collapse": "Ahenda", + "confirm": "Kinnita", + "continue": "Jätka", + "edit": "Muuda", + "expand": "Laienda", + "save": "Salvesta", + "save_and_continue": "Salvesta ja jätka", + "sign_out": "Logi välja", + "start_over": "Alusta uuesti" + }, + "branding": { + "privacy_policy": { + "alt": "Link selle teenuse privaatsuspoliitikale", + "link": "Privaatsuspoliitika" + }, + "terms_and_conditions": { + "alt": "Link selle teenuse kasutustingimustele", + "link": "Kasutustingimused" + } + }, + "common": { + "add": "Lisa", + "e2ee": "Läbiv krüptimine", + "error": "Viga", + "loading": 
"Laadime…", + "next": "Edasi", + "password": "Salasõna", + "previous": "Tagasi", + "saved": "Salvestatud", + "saving": "Salvestame…" + }, + "frontend": { + "account": { + "account_password": "Kasutajakonto salasõna", + "contact_info": "Kontaktinfo", + "delete_account": { + "alert_description": "Sinu kasutajakonto kustutatakse jäädavalt ja sul kaovad kõik võimalused lugeda oma sõnumeid", + "alert_title": "Sa oled kaotamas kõiki oma andmeid", + "button": "Kustuta kasutajakonto", + "dialog_description": "Palun kinnita, et soovid oma kasutajakonto kustutada:\n\n\nSa ei saa seda kasutajakontot hiljem enam uuesti tööle panna\nSa ei saa selle kasutajakontoga võrku logida\nMitte keegi ei saa sinu kasutajanime (MXID) pruukida, sealhulgas sina ise\nSa lahkud kõikidest jututubadest ja otsevestlustest\nSa lahkud isikutuvastusserverist ning mitte keegi ei saa sind leida sinu e-posti aadressi ja telefoninumbri alusel\n\nSinu varemsaadetud sõnumid on jätkuvalt nähtavad nende adressaatidele. Aga kas sa tahaksid oma vanad sõnumid peita jututubade tulevaste liitujate eest?", + "dialog_title": "Kas kustutame selle kasutajakonto?", + "erase_checkbox_label": "Jah, peida minu sõnumid uute liitujate eest", + "incorrect_password": "Vale salasõna, palun proovi uuesti", + "mxid_label": "Kinnita oma Matrixi kasutajatunnus ({{ mxid }})", + "mxid_mismatch": "See väärtus ei vasta sinu Matrixi kasutajatunnusele", + "password_label": "Jätkamiseks sisesta oma salasõna" + }, + "edit_profile": { + "display_name_help": "Kui sa oled sisse loginud, siis muud osapooled näevad alljärgnevat teavet", + "display_name_label": "Kuvatav nimi", + "title": "Muuda profiili", + "username_label": "Kasutajanimi" + }, + "password": { + "change": "Muuda salasõna", + "change_disabled": "Serveri haldaja on salasõnade muutmise võimaluse välja lülitanud.", + "label": "Salasõna" + }, + "sign_out": { + "button": "Logi kontolt välja", + "dialog": "Kas logime sellest kontost välja?" 
+ }, + "title": "Sinu kasutajakonto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Serveri reeglite alusel pole selle e-posti aadressi kasutamine lubatud.", + "title": "Serveri reeglid ei luba seda e-posti aadressi kasutada" + }, + "email_denied_error": "Sisestatud e-posti aadressi kasutamine on serveri reeglitega keelatud", + "email_exists_alert": { + "text": "See e-posti aadress on juba antud kasutajakontoga seotud", + "title": "E-posti aadress on juba kasutusel" + }, + "email_exists_error": "See e-posti aadress on juba antud kasutajakontoga seotud", + "email_field_help": "Ligipääsuks sellele kasutajakontole lisa täiendav e-posti aadress.", + "email_field_label": "Lisa e-posti aadress", + "email_in_use_error": "Sisestatud e-posti aadress on juba kasutusel", + "email_invalid_alert": { + "text": "Lisatud e-posti aadress on vigane", + "title": "Vigane e-posti aadress" + }, + "email_invalid_error": "Lisatud e-posti aadress on vigane", + "incorrect_password_error": "Vale salasõna, palun proovi uuesti", + "password_confirmation": "Selle e-posti aadressi lisamiseks kinnita tegevus oma kasutajakonto salasõnaga" + }, + "app_sessions_list": { + "error": "Rakenduse sessioonide laadimine ei õnnestunud", + "heading": "Rakendused" + }, + "browser_session_details": { + "current_badge": "Praegune", + "session_details_title": "Sessioon" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktiivne sessioon", + "body:other": "{{count}} aktiivset sessiooni", + "heading": "Veebibrauserid", + "no_active_sessions": { + "default": "Sa pole sisse loginud mitte üheski brauseris.", + "inactive_90_days": "Kõik sinu sessioonid on olnud aktiivsed viimase 90 päeva jooksul." 
+ }, + "view_all_button": "Näita kõiki" + }, + "compat_session_detail": { + "client_details_title": "Kliendi teave", + "name": "Nimi", + "session_details_title": "Sessioon" + }, + "device_type_icon_label": { + "desktop": "Töölauarakendus", + "mobile": "Rakendus nutitelefonis", + "pc": "Rakendus arvutis", + "tablet": "Rakendus tahvelarvutis", + "unknown": "Tundmatu seadme tüüp", + "web": "Veebirakendus" + }, + "email_in_use": { + "heading": "E-posti aadress {{email}} on juba kasutusel." + }, + "end_session_button": { + "confirmation_modal_title": "Kas sa oled kindel, et soovid selle sessiooni lõpetada?", + "text": "Eemalda seade" + }, + "error": { + "hideDetails": "Peida lisateave", + "showDetails": "Näita lisateavet", + "subtitle": "Tekkis ootamatu viga. Palun proovi uuesti.", + "title": "Midagi läks viltu" + }, + "error_boundary_title": "Midagi läks viltu", + "errors": { + "field_required": "Selle välja täitmine on kohustuslik", + "rate_limit_exceeded": "Sa oled lühikese aja jooksul teinud liiga palju päringuid. Palun oota paar minutit ja proovi uuesti." 
+ }, + "last_active": { + "active_date": "Viimati kasutusel {{relativeDate}}", + "active_now": "Hetkel kasutusel", + "inactive_90_days": "Pole kasutusel olnud üle 90 päeva" + }, + "nav": { + "devices": "Seadmed", + "plan": "Teenusepakett", + "profile": "Profiil", + "sessions": "Sessioonid", + "settings": "Seadistused" + }, + "not_found_alert_title": "Ei leidu.", + "not_logged_in_alert": "Sa pole sisse loginud.", + "oauth2_client_detail": { + "details_title": "Kliendi teave", + "id": "Kliendi tunnus", + "name": "Nimi", + "policy": "Reeglid", + "terms": "Kasutustingimused" + }, + "oauth2_session_detail": { + "client_details_name": "Nimi", + "client_title": "Kliendi teave", + "session_details_title": "Sessioon" + }, + "pagination_controls": { + "total": "Kokku: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Senine salasõna", + "failure": { + "description": { + "account_locked": "Sinu kasutajakonto on lukustatud ning salasõna taastamist ei ole hetkel võimalik teha. Kui antud olukord tundub sulle kahtlane, siis palun teavita sellest sinu serveri haldajat.", + "expired_recovery_ticket": "Salasõna taastamiseks mõeldud link on aegunud. Palun alusta taastamisprotsessi algusest.", + "invalid_new_password": "Sinu sisestatud uus salasõna pole korrektne, ta ilmselt ei vasta seadistatud turvareeglitele.", + "no_current_password": "Sul on hetkel kehtiv salasõna määramata.", + "no_such_recovery_ticket": "Salasõna taastamiseks mõeldud link on vigane. Kui sa kopeerisid ja e-kirjast, siis palun kontrolli, et kopeerisid kogu lingi ühe tervikuna.", + "password_changes_disabled": "Salasõnade muutmine pole lubatud.", + "recovery_ticket_already_used": "Sa oled salasõna taastamiseks mõeldud linki juba kasutanud ja seda ei saa uuesti teha.", + "unspecified": "See ilmselt on ajutine probleem, palun proovi mõne aja pärast uuesti. Kui viga ei kao, siis abi saad serveri haldajalt.", + "wrong_password": "Sinu sisestatud salasõna ei vasta hetkel kehtivale salasõnale. 
Palun proovi uuesti." + }, + "title": "Salasõna uuendamine ei õnnestunud" + }, + "new_password_again_label": "Sisesta uus salasõna uuesti", + "new_password_label": "Uus salasõna", + "passwords_match": "Salasõnad klapivad omavahel!", + "passwords_no_match": "Salasõnad ei klapi omavahel", + "subtitle": "Vali oma kasutajakontole uus salasõna.", + "success": { + "description": "Sinu salasõna muutmine õnnestus.", + "title": "Salasõna on muudetud" + }, + "title": "Muuda oma salasõna" + }, + "password_reset": { + "consumed": { + "subtitle": "Salasõna muutmiseks alusta uuesti ja vali „Unustasin salasõna“.", + "title": "Sinu salasõna lähtestamiseks mõeldud link on juba kasutatud" + }, + "expired": { + "resend_email": "Saada e-kiri uuesti", + "subtitle": "Palu uut e-kirja aadressile {{email}}", + "title": "Sinu salasõna lähtestamiseks mõeldud link on aegunud" + }, + "subtitle": "Vali oma kasutajakontole uus salasõna.", + "title": "Lähtesta oma salasõna" + }, + "password_strength": { + "placeholder": "Salasõna turvalisuse tase", + "score": { + "0": "Eriti nõrk salasõna", + "1": "Väga nõrk salasõna", + "2": "Nõrk salasõna", + "3": "Tugev salasõna", + "4": "Väga tugev salasõna" + }, + "suggestion": { + "all_uppercase": "Muuda mõned tähed suurtähtedeks, aga mitte kõik.", + "another_word": "Lisa juurde mõni vähelevinud sõna.", + "associated_years": "Väldi sinuga seotud aastanumbreid.", + "capitalization": "Lisaks esimesele suurtähele lisa veel suurtähti.", + "dates": "Väldi sinuga seotud kuupäevi ja aastanumbreid.", + "l33t": "Väldi kergesti ennustatavaid täheasendusi nagu „@“ „a“ asemel.", + "longer_keyboard_pattern": "Kasuta pikemaid klahvivajutuse mustreid ja muuda paar korda liikumise suunda klaviatuuril.", + "no_need": "Sa saad luua turvalisi salasõnu ka ilma sümbolite, numbrite ja suurtähtedeta.", + "pwned": "Kui sa kasutad sama salasõna mujalgi, siis kindlasti muuda seda.", + "recent_years": "Väldi hiljutisi aastaid.", + "repeated": "Väldi korduvaid sõnu ja tähemärke.", + 
"reverse_words": "Väldi tavaliste sõnade kirjutamist tagurpidi.", + "sequences": "Väldi levinud tähemärkide järjestusi.", + "use_words": "Kasuta mitut sõna, kuid väldi levinud lauseid ja fraase." + }, + "too_weak": "See salasõna on liiga nõrk.", + "warning": { + "common": "See on üldlevinud salasõna.", + "common_names": "Levinud eesnimede ja perenimede äraarvamine on lihtne.", + "dates": "Kuupäevade äraarvamine on lihtne.", + "extended_repeat": "Tähemärkide korduvate kombinatsioonide, nagu „abcabcabc“ äraarvamine on lihtne.", + "key_pattern": "Lühikeste klaviatuurimustrite äraarvamine on lihtne.", + "names_by_themselves": "Üksikute eesnimede ja perenimede äraarvamine on lihtne.", + "pwned": "Sinu sisestatud salasõna leidub internetis rändama läinud salasõnade andmekogus.", + "recent_years": "Hiljutiste aastate äraarvamine on lihtne.", + "sequences": "Levinud tähejadade nagu „abc“ äraarvamine on lihtne.", + "similar_to_common": "See salasõna sarnaneb ühele tavaliselt kasutatavale salasõnale.", + "simple_repeat": "Korduvate tähejadade nagu „aaa“ äraarvamine on lihtne.", + "straight_row": "Klaviatuuril järjestikku asuvate klahvide põhiste tähejadade äraarvamine on lihtne.", + "top_hundred": "See on sagedasti kasutatav salasõna.", + "top_ten": "See on ülisagedasti kasutatav salasõna.", + "user_inputs": "Salasõnas ei peaks olema isiklikku ega selle veebilehega seotud andmeid.", + "word_by_itself": "Üksikute sõnade äraarvamine on lihtne." + } + }, + "reset_cross_signing": { + "button": "Lähtesta krüptoidentiteet", + "cancelled": { + "description_1": "Jätkamiseks võid sulgeda selle akna ja minna tagasi rakenduse juurde.", + "description_2": "Kui sa oled kõikidest oma seadmetest välja loginud ja ei mäleta oma kasutajakonto taastekoodi, on jätkuvalt võimalus oma krüptoidentiteet lähtestada.", + "heading": "Krüptoidentiteedi lähtestamine on katkestatud." 
+ }, + "description": "Kui sa pole mitte üheski seadmes Matrixi võrku sisse loginud ja oled unustanud või kaotanud kõik oma kasutajakontole ligipääsu taastamise võimalused, siis sa pead rakenduse edasiseks kasutamiseks oma krüptoidentiteedi lähtestama.", + "effect_list": { + "negative_1": "Sa kaotad oma olemasolevate sõnumite ajaloo.", + "negative_2": "Sa pead kõik oma kasutatavad seadmed ja kontaktid uuesti verifitseerima", + "neutral_1": "Sa kaotad ligipääsu kõikidele sõnumitele, mis on salvestatud vaid serveris.", + "neutral_2": "Sa pead kõik oma kasutatavad seadmed ja kontaktid uuesti verifitseerima", + "positive_1": "Sinu kasutajakonto andmed, kontaktid, eelistused ja vestluste loendid jäävad muutmata" + }, + "failure": { + "description": "See ilmselt on ajutine probleem, palun proovi mõne aja pärast uuesti. Kui viga ei kao, siis abi saad serveri haldajalt.", + "heading": "Krüptoidentiteedi lähtestamise lubamine ei õnnestunud", + "title": "Viga krüptoidentiteedi oleku muutmisel" + }, + "finish_reset": "Lõpeta lähtestamine", + "heading": "Kui sa ei saa leida ühtegi muud viisi, siis lähtesta oma krüptoidentiteet", + "start_reset": "Alusta lähtestamist", + "success": { + "description": "Sinu krüptoidentiteedi lähtestamine on lubatud järgneva {{minutes}} minuti jooksul. Jätkamiseks võid selle akna sulgeda ja minna tagasi rakenduse juurde.", + "heading": "Krüptoidentiteedi lähtestamine õnnestus. Selle protsessi lõpetamiseks mine tagasi rakenduse juurde.", + "title": "Krüptoidentiteedi lähtestamine on ajutiselt lubatud" + }, + "warning": "Lähtesta oma krüptoidentiteet vaid siis, kui sul pole ligipääsu mitte ühelegi oma sisselogitud seadmele ja sa oled kaotanud oma kasutajakonto taastevõtme." 
+ }, + "selectable_session": { + "label": "Vali sessioon" + }, + "session": { + "client_id_label": "Kliendi tunnus", + "current": "Praegune", + "current_badge": "Praegune", + "device_id_label": "Seadme tunnus", + "finished_date": "Lõpetatud ", + "finished_label": "Lõpetatud", + "generic_browser_session": "Brauseri sessioon", + "id_label": "ID", + "ip_label": "IP-aadress", + "last_active_label": "Viimati kasutusel", + "last_auth_label": "Viimati autenditud", + "name_for_platform": "{{name}} / {{platform}}", + "scopes_label": "Õigused", + "set_device_name": { + "help": "Sisesta nimi, mis aitab sul hiljem seda seadet ära tunda.", + "label": "Seadme nimi", + "title": "Muuda seadme nime" + }, + "signed_in_date": "Sisse logitud ", + "signed_in_label": "Sisse logitud", + "title": "Seadme andmed", + "unknown_browser": "Tundmatu brauser", + "unknown_device": "Tundmatu seade", + "uri_label": "Aadress", + "user_id_label": "Kasutajatunnus", + "username_label": "Kasutajanimi" + }, + "session_detail": { + "alert": { + "button": "Mine tagasi", + "text": "Seda sessiooni kas pole olemas või ta pole enam aktiivne.", + "title": "Sessiooni ei õnnestu leida: {{deviceId}}" + } + }, + "unknown_route": "Tundmatu marsruut {{route}}", + "unverified_email_alert": { + "button": "Vaata üle ja kinnita", + "text:one": "Sul on {{count}} kinnitamata e-posti aadress.", + "text:other": "Sul on {{count}} kinnitamata e-posti aadressi.", + "title": "Kinnitamata e-posti aadress" + }, + "user_email": { + "cant_delete_primary": "Selle e-posti aadressi kustutamiseks vali uus põhiline e-posti aadress.", + "delete_button_confirmation_modal": { + "action": "Kustuta e-posti aadress", + "body": "Kas kustutame selle e-posti aadressi?", + "incorrect_password": "Vale salasõna, palun proovi uuesti", + "password_confirmation": "Selle e-posti aadressi kustutamiseks kinnita tegevus oma salasõnaga" + }, + "delete_button_title": "Eemalda e-posti aadress", + "email": "E-posti aadress", + "make_primary_button": "Määra 
põhiliseks e-posti aadressiks", + "not_verified": "E-posti aadress on kinnitamata", + "primary_email": "Põhiline e-posti aadress", + "retry_button": "Saada kood uuesti", + "unverified": "Kinnitamata" + }, + "user_email_list": { + "heading": "E-posti aadressid", + "no_primary_email_alert": "Põhilist e-posti aadressi pole" + }, + "user_greeting": { + "error": "Kasutaja andmete laadimine ei õnnestunud" + }, + "user_name": { + "display_name_field_label": "Kuvatav nimi" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktiivne sessioon", + "active_sessions:other": "{{count}} aktiivset sessiooni", + "heading": "Kus sa oled sisse logitud", + "no_active_sessions": { + "default": "Sa pole mitte üheski rakenduses sisse loginud.", + "inactive_90_days": "Kõik sinu sessioonid on olnud aktiivsed viimase 90 päeva jooksul." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Kood on aegunud. Palun tee päring uue koodi saamiseks.", + "title": "Kood on aegunud" + }, + "code_field_error": "Kood pole õige", + "code_field_label": "6-kohaline kood", + "code_field_wrong_shape": "Kood peab olema 6-numbriline", + "email_sent_alert": { + "description": "Sisesta alljärgnevale väljale uus kood.", + "title": "Uus kood on saadetud" + }, + "enter_code_prompt": "Sisesta 6-kohaline kood, mis saadeti e-posti aadressile: {{email}}", + "heading": "Kinnita oma e-posti aadressi õigsust", + "invalid_code_alert": { + "description": "Jätkamiseks kirjuta siis kood, mille saatsime sinu e-posti aadressile.", + "title": "Sa sisestasid vale koodi" + }, + "resend_code": "Saada kood uuesti", + "resend_email": "Saada e-kiri uuesti", + "sent": "Saadetud!", + "unknown_email": "Tundmatu e-posti aadress" + } + }, + "mas": { + "scope": { + "edit_profile": "Muuta sinu kasutajaprofiili ning kontaktandmeid", + "manage_sessions": "Hallata sinu seadmeid ja sessioone", + "mas_admin": "Hallata kasutajaid (urn:mas:admin)", + "send_messages": "Saata sõnumeid sinu nimel", + 
"synapse_admin": "Hallata seda Synapse koduserverit (urn:synapse:admin:*)", + "view_messages": "Vaadata sinu sõnumeid ja andmeid", + "view_profile": "Vaadata sinu profiili teavet ja kontaktadmeid" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/fi.json b/matrix-authentication-service/frontend/locales/fi.json new file mode 100644 index 00000000..d310668c --- /dev/null +++ b/matrix-authentication-service/frontend/locales/fi.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Takaisin", + "cancel": "Peruuta", + "clear": "Tyhjennä", + "close": "Sulje", + "collapse": "Supista", + "confirm": "Vahvista", + "continue": "Jatka", + "edit": "Muokkaa", + "expand": "Laajenna", + "save": "Tallenna", + "save_and_continue": "Tallenna ja jatka", + "sign_out": "Kirjaudu ulos", + "start_over": "Aloita alusta" + }, + "branding": { + "privacy_policy": { + "alt": "Linkki palvelun tietosuojakäytäntöön", + "link": "Tietosuojakäytäntö" + }, + "terms_and_conditions": { + "alt": "Linkki palvelun käyttöehtoihin", + "link": "Käyttöehdot" + } + }, + "common": { + "add": "Lisää", + "e2ee": "Päästä päähän -salaus", + "error": "Virhe", + "loading": "Ladataan…", + "next": "Seuraava", + "password": "Salasana", + "previous": "Edellinen", + "saved": "Tallennettu", + "saving": "Tallennetaan…" + }, + "frontend": { + "account": { + "account_password": "Tilin salasana", + "contact_info": "Yhteystiedot", + "delete_account": { + "alert_description": "Tämä tili poistetaan pysyvästi, etkä enää pääse käsiksi viesteihisi.", + "alert_title": "Olet menettämässä kaikki tietosi", + "button": "Poista tili", + "dialog_description": "Vahvista, että haluat poistaa tilisi:\n\n\nEt voi aktivoida tiliäsi uudelleen\nEt voi enää kirjautua sisään\nKukaan ei voi käyttää käyttäjänimeäsi (MXID) uudelleen, et edes sinä\nPoistut kaikista huoneista ja keskusteluista, joissa olet\nSinut poistetaan identiteettipalvelimelta, eikä kukaan voi löytää sinua sähköpostiosoitteellasi tai 
puhelinnumerollasi\n\nVanhat viestisi näkyvät edelleen niille, jotka ovat vastaanottaneet ne. Haluatko piilottaa lähetetyt viestit niiltä, jotka liittyvät huoneisiin tulevaisuudessa?", + "dialog_title": "Poistetaanko tämä tili?", + "erase_checkbox_label": "Kyllä, piilota kaikki viestini uusilta liittyjiltä", + "incorrect_password": "Väärä salasana, yritä uudelleen", + "mxid_label": "Vahvista Matrix-tunnuksesi ({{ mxid }})", + "mxid_mismatch": "Tämä ei täsmää Matrix-tunnuksesi kanssa", + "password_label": "Kirjoita salasanasi jatkaaksesi" + }, + "edit_profile": { + "display_name_help": "Muut näkevät tämän kaikkialla, missä olet kirjautuneena sisään.", + "display_name_label": "Näyttönimi", + "title": "Muokkaa profiilia", + "username_label": "Käyttäjänimi" + }, + "password": { + "change": "Vaihda salasana", + "change_disabled": "Ylläpitäjä on poistanut salasanan vaihdon käytöstä.", + "label": "Salasana" + }, + "sign_out": { + "button": "Kirjaudu ulos tililtä", + "dialog": "Kirjaudutaanko ulos tältä tililtä?" 
+ }, + "title": "Tilisi" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Palvelimen käytäntö ei salli syötettyä sähköpostia.", + "title": "Käytännön kieltämä sähköposti" + }, + "email_denied_error": "Palvelimen käytäntö ei salli syötettyä sähköpostia", + "email_exists_alert": { + "text": "Syötetty sähköpostiosoite on jo lisätty tähän tiliin", + "title": "Sähköposti on jo olemassa" + }, + "email_exists_error": "Syötetty sähköpostiosoite on jo lisätty tähän tiliin", + "email_field_help": "Lisää vaihtoehtoinen sähköpostiosoite, jolla voit käyttää tätä tiliä.", + "email_field_label": "Lisää sähköpostiosoite", + "email_in_use_error": "Annettu sähköpostiosoite on jo käytössä", + "email_invalid_alert": { + "text": "Syötetty sähköpostiosoite on virheellinen", + "title": "Virheellinen sähköpostiosoite" + }, + "email_invalid_error": "Syötetty sähköpostiosoite on virheellinen", + "incorrect_password_error": "Väärä salasana, yritä uudelleen", + "password_confirmation": "Vahvista tilisi salasana lisätäksesi tämän sähköpostiosoitteen" + }, + "app_sessions_list": { + "error": "Sovellusten istuntojen lataaminen epäonnistui", + "heading": "Sovellukset" + }, + "browser_session_details": { + "current_badge": "Nykyinen", + "session_details_title": "Istunto" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktiivinen istunto", + "body:other": "{{count}} aktiivista istuntoa", + "heading": "Selaimet", + "no_active_sessions": { + "default": "Et ole kirjautunut mihinkään verkkoselaimeen.", + "inactive_90_days": "Kaikki istuntosi ovat olleet aktiivisia viimeisten 90 päivän aikana." 
+ }, + "view_all_button": "Näytä kaikki" + }, + "compat_session_detail": { + "client_details_title": "Sovelluksen tiedot", + "name": "Nimi", + "session_details_title": "Istunto" + }, + "device_type_icon_label": { + "desktop": "Pöytätietokone", + "mobile": "Puhelin", + "pc": "Tietokone", + "tablet": "Tabletti", + "unknown": "Tuntematon laitetyyppi", + "web": "Verkko" + }, + "email_in_use": { + "heading": "Sähköpostiosoite {{email}} on jo käytössä." + }, + "end_session_button": { + "confirmation_modal_title": "Haluatko varmasti lopettaa tämän istunnon?", + "text": "Poista laite" + }, + "error": { + "hideDetails": "Piilota tiedot", + "showDetails": "Näytä tiedot", + "subtitle": "Tapahtui odottamaton virhe. Yritä uudelleen.", + "title": "Jokin meni pieleen" + }, + "error_boundary_title": "Jokin meni pieleen", + "errors": { + "field_required": "Tämä kenttä on pakollinen", + "rate_limit_exceeded": "Olet tehnyt liian monta pyyntöä lyhyessä ajassa. Odota muutama minuutti ja yritä uudelleen." + }, + "last_active": { + "active_date": "Aktiivinen {{relativeDate}}", + "active_now": "Aktiivinen nyt", + "inactive_90_days": "Ei aktiivinen yli 90 päivää" + }, + "nav": { + "devices": "Laitteet", + "plan": "Tilaus", + "profile": "Profiili", + "sessions": "Istunnot", + "settings": "Asetukset" + }, + "not_found_alert_title": "Ei löytynyt.", + "not_logged_in_alert": "Et ole kirjautunut sisään.", + "oauth2_client_detail": { + "details_title": "Sovelluksen tiedot", + "id": "Sovelluksen tunnus", + "name": "Nimi", + "policy": "Käytäntö", + "terms": "Käyttöehdot" + }, + "oauth2_session_detail": { + "client_details_name": "Nimi", + "client_title": "Sovelluksen tiedot", + "session_details_title": "Istunto" + }, + "pagination_controls": { + "total": "Yhteensä: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Nykyinen salasana", + "failure": { + "description": { + "account_locked": "Tilisi on lukittu eikä sitä voi palauttaa tällä hetkellä. 
Jos et odottanut tätä, ota yhteyttä palvelimen ylläpitäjään.", + "expired_recovery_ticket": "Palautuslinkki on vanhentunut. Aloita tilin palautusprosessi uudelleen alusta.", + "invalid_new_password": "Valitsemasi uusi salasana on virheellinen; se ei ehkä vastaa määritettyä suojauskäytäntöä.", + "no_current_password": "Sinulla ei ole nykyistä salasanaa.", + "no_such_recovery_ticket": "Palautuslinkki on virheellinen. Jos kopioit linkin palautussähköpostista, tarkista, että kopioit koko linkin.", + "password_changes_disabled": "Salasanan vaihtaminen on poistettu käytöstä.", + "recovery_ticket_already_used": "Palautuslinkki on jo käytetty. Sitä ei voi käyttää uudelleen.", + "unspecified": "Tämä saattaa olla väliaikainen ongelma, joten yritä myöhemmin uudelleen. Jos ongelma jatkuu, ota yhteyttä palvelimen ylläpitäjään.", + "wrong_password": "Nykyiseksi salasanaksi antamasi salasana on virheellinen. Yritä uudelleen." + }, + "title": "Salasanan vaihtaminen epäonnistui" + }, + "new_password_again_label": "Syötä uusi salasana uudelleen", + "new_password_label": "Uusi salasana", + "passwords_match": "Salasanat täsmäävät!", + "passwords_no_match": "Salasanat eivät täsmää", + "subtitle": "Valitse tilillesi uusi salasana.", + "success": { + "description": "Salasanasi on vaihdettu onnistuneesti.", + "title": "Salasana vaihdettu" + }, + "title": "Vaihda salasanasi" + }, + "password_reset": { + "consumed": { + "subtitle": "Jos haluat luoda uuden salasanan, aloita alusta ja valitse \"Unohditko salasanasi?\".", + "title": "Linkki salasanasi palauttamiseen on jo käytetty" + }, + "expired": { + "resend_email": "Lähetä sähköposti uudelleen", + "subtitle": "Pyydä uusi sähköpostiviesti, joka lähetetään osoitteeseen: {{email}}", + "title": "Linkki salasanasi palauttamiseen on vanhentunut" + }, + "subtitle": "Valitse tilillesi uusi salasana.", + "title": "Palauta salasanasi" + }, + "password_strength": { + "placeholder": "Salasanan vahvuus", + "score": { + "0": "Erittäin heikko salasana", 
+ "1": "Todella heikko salasana", + "2": "Heikko salasana", + "3": "Vahva salasana", + "4": "Todella vahva salasana" + }, + "suggestion": { + "all_uppercase": "Kirjoita jotkut kirjaimet isolla, mutta älä kaikkia.", + "another_word": "Lisää harvinaisempia sanoja.", + "associated_years": "Vältä vuosilukuja, jotka liittyvät sinuun.", + "capitalization": "Kirjoita isoilla kirjaimilla enemmän kuin vain ensimmäinen kirjain.", + "dates": "Vältä päivämääriä ja vuosia, jotka liittyvät sinuun.", + "l33t": "Vältä ennakoitavissa olevia kirjainten korvauksia, kuten 'a' korvaamista '@' symbolilla.", + "longer_keyboard_pattern": "Käytä pidempiä näppäinkuvioita ja vaihda kirjoitussuuntaa useita kertoja.", + "no_need": "Voit luoda vahvoja salasanoja ilman symboleja, numeroita tai isoja kirjaimia.", + "pwned": "Jos käytät tätä salasanaa muualla, vaihda se.", + "recent_years": "Vältä lähivuosia.", + "repeated": "Vältä toistuvia sanoja ja merkkejä.", + "reverse_words": "Vältä yleisien sanojen takaperin kirjoitusta.", + "sequences": "Vältä yleisiä merkkijonoja.", + "use_words": "Käytä useita sanoja, mutta vältä yleisiä lauseita." 
+ }, + "too_weak": "Tämä salasana on liian heikko", + "warning": { + "common": "Tämä on yleisesti käytetty salasana.", + "common_names": "Yleiset nimet ja sukunimet on helppo arvata.", + "dates": "Päivämäärät on helppo arvata.", + "extended_repeat": "Toistuvat merkkikuviot, kuten \"abcabcabcabc\", on helppo arvata.", + "key_pattern": "Lyhyet näppäinkuviot on helppo arvata.", + "names_by_themselves": "Yksittäiset nimet tai sukunimet on helppo arvata.", + "pwned": "Salasanasi paljastui internetissä tapahtuneen tietomurron yhteydessä.", + "recent_years": "Lähivuodet on helppo arvata.", + "sequences": "Yleiset merkkijonot, kuten \"abc\", on helppo arvata.", + "similar_to_common": "Tämä on samanlainen kuin yleisesti käytetty salasana.", + "simple_repeat": "Toistuvat merkit, kuten \"aaa\", on helppo arvata.", + "straight_row": "Näppäimistön suorat näppäinrivit on helppo arvata.", + "top_hundred": "Tämä on usein käytetty salasana.", + "top_ten": "Tämä on paljon käytetty salasana.", + "user_inputs": "Henkilökohtaisia tai sivuun liittyviä tietoja ei pitäisi olla.", + "word_by_itself": "Yksittäiset sanat on helppo arvata." + } + }, + "reset_cross_signing": { + "button": "Nollaa identiteetti", + "cancelled": { + "description_1": "Voit sulkea tämän ikkunan ja palata sovellukseen jatkaaksesi.", + "description_2": "Jos olet kirjautunut ulos kaikkialta etkä muista palautuskoodiasi, sinun on silti nollattava identiteettisi.", + "heading": "Identiteetin nollaus peruutettu." 
+ }, + "description": "Jos et ole kirjautunut muihin laitteisiin ja olet kadottanut palautusavaimesi, sinun on nollattava identiteettisi, jotta voit jatkaa sovelluksen käyttöä.", + "effect_list": { + "negative_1": "Menetät nykyisen viestihistoriasi", + "negative_2": "Sinun on vahvistettava kaikki olemassa olevat laitteesi ja yhteystietosi uudelleen", + "neutral_1": "Menetät kaiken viestihistorian, joka on tallella vain palvelimella", + "neutral_2": "Sinun on vahvistettava kaikki olemassa olevat laitteesi ja yhteystietosi uudelleen", + "positive_1": "Tilitietosi, yhteystiedot, asetukset ja keskustelulista säilytetään" + }, + "failure": { + "description": "Tämä saattaa olla väliaikainen ongelma, joten yritä myöhemmin uudelleen. Jos ongelma jatkuu, ota yhteyttä palvelimen ylläpitäjään.", + "heading": "Kryptografisen identiteetin nollauksen salliminen epäonnistui", + "title": "Kryptografisen identiteetin nollauksen salliminen epäonnistui" + }, + "finish_reset": "Viimeistele nollaus", + "heading": "Nollaa identiteettisi, jos et voi vahvistaa muulla tavalla", + "start_reset": "Aloita nollaus", + "success": { + "description": "Identiteetin nollaus on hyväksytty seuraavaksi {{minutes}} minuutiksi. Voit sulkea tämän ikkunan ja palata sovellukseen jatkaaksesi.", + "heading": "Identiteetin nollaus onnistui. Palaa takaisin sovellukseen viimeistelläksesi prosessin.", + "title": "Kryptografisen identiteetin nollaus tilapäisesti sallittu" + }, + "warning": "Nollaa identiteettisi vain, jos et voi käyttää toista laitetta, johon olet kirjautunut, ja olet kadottanut palautusavaimesi." 
+ }, + "selectable_session": { + "label": "Valitse istunto" + }, + "session": { + "client_id_label": "Sovelluksen tunnus", + "current": "Nykyinen", + "current_badge": "Nykyinen", + "device_id_label": "Laitteen tunnus", + "finished_date": "Päättyi ", + "finished_label": "Päättyi", + "generic_browser_session": "Selainistunto", + "id_label": "ID", + "ip_label": "IP-osoite", + "last_active_label": "Viimeksi aktiivinen", + "last_auth_label": "Viimeisin todennus", + "name_for_platform": "{{name}} {{platform}}:lle", + "scopes_label": "Vaikutusalue", + "set_device_name": { + "help": "Aseta nimi, jonka avulla tunnistat tämän laitteen.", + "label": "Laitteen nimi", + "title": "Muokkaa laitteen nimeä" + }, + "signed_in_date": "Kirjautunut sisään ", + "signed_in_label": "Kirjautunut sisään", + "title": "Laitteen tiedot", + "unknown_browser": "Tuntematon selain", + "unknown_device": "Tuntematon laite", + "uri_label": "Osoite", + "user_id_label": "Käyttäjätunnus", + "username_label": "Käyttäjänimi" + }, + "session_detail": { + "alert": { + "button": "Palaa takaisin", + "text": "Tätä istuntoa ei ole olemassa tai se ei ole enää aktiivinen.", + "title": "Istuntoa ei löydy: {{deviceId}}" + } + }, + "unknown_route": "Tuntematon reitti {{route}}", + "unverified_email_alert": { + "button": "Tarkista ja vahvista", + "text:one": "Sinulla on {{count}} vahvistamaton sähköpostiosoite.", + "text:other": "Sinulla on {{count}} vahvistamatonta sähköpostiosoitetta.", + "title": "Vahvistamaton sähköpostiosoite" + }, + "user_email": { + "cant_delete_primary": "Valitse toinen ensisijainen sähköpostiosoite poistaaksesi tämän.", + "delete_button_confirmation_modal": { + "action": "Poista sähköposti", + "body": "Poistetaanko tämä sähköposti?", + "incorrect_password": "Väärä salasana, yritä uudelleen", + "password_confirmation": "Vahvista tilisi salasana poistaaksesi tämän sähköpostiosoitteen" + }, + "delete_button_title": "Poista sähköpostiosoite", + "email": "Sähköposti", + "make_primary_button": 
"Tee ensisijaiseksi", + "not_verified": "Ei vahvistettu", + "primary_email": "Ensisijainen sähköposti", + "retry_button": "Lähetä koodi uudelleen", + "unverified": "Vahvistamaton" + }, + "user_email_list": { + "heading": "Sähköpostit", + "no_primary_email_alert": "Ei ensisijaista sähköpostiosoitetta" + }, + "user_greeting": { + "error": "Käyttäjän lataaminen epäonnistui" + }, + "user_name": { + "display_name_field_label": "Näyttönimi" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktiivinen istunto", + "active_sessions:other": "{{count}} aktiivista istuntoa", + "heading": "Missä olet kirjautuneena", + "no_active_sessions": { + "default": "Et ole kirjautunut mihinkään sovellukseen.", + "inactive_90_days": "Kaikki istuntosi ovat olleet aktiivisia viimeisten 90 päivän aikana." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Koodi on vanhentunut. Pyydä uusi koodi.", + "title": "Koodi vanhentunut" + }, + "code_field_error": "Koodia ei tunnistettu", + "code_field_label": "6-numeroinen koodi", + "code_field_wrong_shape": "Koodin on oltava 6-numeroinen", + "email_sent_alert": { + "description": "Kirjoita uusi koodi alle.", + "title": "Uusi koodi lähetetty" + }, + "enter_code_prompt": "Syötä 6-numeroinen koodi, joka lähetettiin osoitteeseen: {{email}}", + "heading": "Vahvista sähköpostiosoitteesi", + "invalid_code_alert": { + "description": "Tarkista sähköpostiisi lähetetty koodi ja päivitä alla olevat kentät jatkaaksesi.", + "title": "Syötit väärän koodin" + }, + "resend_code": "Lähetä koodi uudelleen", + "resend_email": "Lähetä sähköposti uudelleen", + "sent": "Lähetetty!", + "unknown_email": "Tuntematon sähköpostiosoite" + } + }, + "mas": { + "scope": { + "edit_profile": "Muokata profiiliasi ja yhteystietojasi", + "manage_sessions": "Hallinnoida laitteitasi ja istuntojasi", + "mas_admin": "Hallinnoida käyttäjiä (urn:mas:admin)", + "send_messages": "Lähettää uusia viestejä puolestasi", + "synapse_admin": "Hallinnoida 
palvelinta (urn:synapse:admin:*)", + "view_messages": "Tarkastella olemassa olevia viestejäsi ja tietojasi", + "view_profile": "Tarkastella profiilitietojasi ja yhteystietojasi" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/fr.json b/matrix-authentication-service/frontend/locales/fr.json new file mode 100644 index 00000000..d9219d9f --- /dev/null +++ b/matrix-authentication-service/frontend/locales/fr.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Retour", + "cancel": "Annuler", + "clear": "Effacer", + "close": "Fermer", + "collapse": "Réduire", + "confirm": "Confirmer", + "continue": "Continuer", + "edit": "Éditer", + "expand": "Développer", + "save": "Enregistrer", + "save_and_continue": "Sauvegarder et continuer", + "sign_out": "Se déconnecter", + "start_over": "Recommencer" + }, + "branding": { + "privacy_policy": { + "alt": "Lien vers la politique de confidentialité", + "link": "Politique de confidentialité" + }, + "terms_and_conditions": { + "alt": "Lien vers les conditions d'utilisation", + "link": "Conditions d'utilisation" + } + }, + "common": { + "add": "Ajouter", + "e2ee": "Chiffrement de bout en bout", + "error": "Erreur", + "loading": "Chargement…", + "next": "Suivant", + "password": "Mot de passe", + "previous": "Précédent", + "saved": "Sauvegardé", + "saving": "Enregistrement..." 
+ }, + "frontend": { + "account": { + "account_password": "Mot de passe du compte", + "contact_info": "Coordonnées", + "delete_account": { + "alert_description": "Ce compte sera définitivement supprimé et vous n'aurez plus accès à aucun de vos messages.", + "alert_title": "Vous êtes sur le point de perdre toutes vos données", + "button": "Supprimer le compte", + "dialog_description": "Confirmez que vous souhaitez supprimer votre compte :\n\n\nVous ne pourrez pas réactiver votre compte\nVous ne pourrez plus vous connecter\nPersonne ne pourra réutiliser votre nom d'utilisateur (MXID), y compris vous\nVous quitterez tous les salons et les messages directs dans lesquels vous vous trouvez\nVous serez retiré du serveur d'identité et personne ne pourra vous trouver avec votre e-mail ou votre numéro de téléphone\n\nVos anciens messages seront toujours visibles pour les personnes qui les ont reçus. Souhaitez-vous masquer les messages que vous envoyez aux personnes qui rejoindront des salons à l'avenir ?", + "dialog_title": "Supprimer ce compte ?", + "erase_checkbox_label": "Oui, masquer tous mes messages aux nouveaux arrivants", + "incorrect_password": "Mot de passe incorrect, veuillez réessayer", + "mxid_label": "Confirmez votre identifiant Matrix ({{ mxid }})", + "mxid_mismatch": "Cela ne correspond pas à votre identifiant Matrix", + "password_label": "Saisissez votre mot de passe pour continuer" + }, + "edit_profile": { + "display_name_help": "C'est ce que les autres personnes verront là où vous êtes connecté.", + "display_name_label": "Pseudonyme", + "title": "Editer le profil", + "username_label": "Nom d’utilisateur" + }, + "password": { + "change": "Changer de mot de passe", + "change_disabled": "Le changement de mot de passe a été désactivé par l'administrateur.", + "label": "Mot de passe" + }, + "sign_out": { + "button": "Se déconnecter du compte", + "dialog": "Se déconnecter de ce compte ?" 
+ }, + "title": "Votre compte" + }, + "add_email_form": { + "email_denied_alert": { + "text": "L’adresse e-mail saisie n’est pas autorisée par la politique du serveur.", + "title": "Adresse e-mail refusée par la politique du serveur" + }, + "email_denied_error": "L’adresse e-mail saisie n’est pas conforme à la politique du serveur", + "email_exists_alert": { + "text": "L’adresse e-mail saisie est déjà ajoutée à ce compte", + "title": "Cette adresse e-mail existe déjà" + }, + "email_exists_error": "L’adresse e-mail saisie est déjà ajoutée à ce compte", + "email_field_help": "Ajouter une autre adresse e-mail à utiliser pour ce compte.", + "email_field_label": "Ajouter une adresse e-mail", + "email_in_use_error": "L’adresse e-mail saisie est déjà utilisée", + "email_invalid_alert": { + "text": "L’adresse e-mail saisie est invalide", + "title": "Adresse e-mail invalide" + }, + "email_invalid_error": "L’adresse e-mail saisie est invalide", + "incorrect_password_error": "Mot de passe incorrect, veuillez réessayer", + "password_confirmation": "Confirmez le mot de passe de votre compte pour ajouter cette adresse e-mail" + }, + "app_sessions_list": { + "error": "Impossible de charger les sessions d'application", + "heading": "Applications" + }, + "browser_session_details": { + "current_badge": "Actuel", + "session_details_title": "Session" + }, + "browser_sessions_overview": { + "body:one": "{{count}} session active", + "body:other": "{{count}} sessions actives", + "heading": "Navigateurs", + "no_active_sessions": { + "default": "Vous n'êtes connecté sur aucun navigateur web.", + "inactive_90_days": "Toutes vos sessions ont été actives au cours des 90 derniers jours." 
+ }, + "view_all_button": "Tout voir" + }, + "compat_session_detail": { + "client_details_title": "Application", + "name": "Nom", + "session_details_title": "Session" + }, + "device_type_icon_label": { + "desktop": "Ordinateur", + "mobile": "Mobile", + "pc": "Ordinateur", + "tablet": "Tablette", + "unknown": "Type d’appareil inconnu", + "web": "Web" + }, + "email_in_use": { + "heading": "L’adresse e-mail {{email}} est déjà utilisée." + }, + "end_session_button": { + "confirmation_modal_title": "Êtes-vous sûr de vouloir terminer cette session ?", + "text": "Supprimer l’appareil" + }, + "error": { + "hideDetails": "Masquer les détails", + "showDetails": "Afficher les détails", + "subtitle": "Une erreur inattendue s'est produite. Veuillez réessayer", + "title": "Un problème est survenu" + }, + "error_boundary_title": "Un problème est survenu", + "errors": { + "field_required": "Ce champ est requis", + "rate_limit_exceeded": "Vous avez effectué trop de requêtes sur une courte période. Veuillez patienter quelques minutes et réessayer." 
+ }, + "last_active": { + "active_date": "Actif {{relativeDate}}", + "active_now": "Actif maintenant", + "inactive_90_days": "Inactif depuis plus de 90 jours" + }, + "nav": { + "devices": "Appareil", + "plan": "Forfait", + "profile": "Profil", + "sessions": "Sessions", + "settings": "Paramètres" + }, + "not_found_alert_title": "Introuvable.", + "not_logged_in_alert": "Vous n’êtes pas connecté.", + "oauth2_client_detail": { + "details_title": "Application", + "id": "Client ID", + "name": "Nom", + "policy": "Politique de confidentialité", + "terms": "Conditions d’utilisation" + }, + "oauth2_session_detail": { + "client_details_name": "Nom", + "client_title": "Application", + "session_details_title": "Session" + }, + "pagination_controls": { + "total": "Total : {{totalCount}}" + }, + "password_change": { + "current_password_label": "Mot de passe actuel", + "failure": { + "description": { + "account_locked": "Votre compte est bloqué et ne peut pas être récupéré pour le moment. Si cela n'est pas normal, contactez l'administrateur de votre serveur.", + "expired_recovery_ticket": "Le lien de récupération a expiré. Veuillez recommencer le processus de récupération de compte depuis le début.", + "invalid_new_password": "Le mot de passe choisi n'est pas valide ; il ne correspond peut-être pas à la politique de sécurité configurée.", + "no_current_password": "Vous n'avez pas de mot de passe actuel.", + "no_such_recovery_ticket": "Le lien de restauration n'est pas valide. Si vous avez copié le lien depuis l'e-mail de restauration, vérifiez que le lien complet a été copié.", + "password_changes_disabled": "Les changements de mot de passe sont désactivés.", + "recovery_ticket_already_used": "Le lien de restauration a déjà été utilisé. Il ne peut pas être réutilisé.", + "unspecified": "Il s'agit peut-être d'un problème temporaire, veuillez donc réessayer ultérieurement. 
Si le problème persiste, contactez l'administrateur de votre serveur.", + "wrong_password": "Le mot de passe que vous avez fourni comme mot de passe actuel est incorrect. Veuillez réessayer." + }, + "title": "Impossible de mettre à jour le mot de passe" + }, + "new_password_again_label": "Entrez de nouveau votre nouveau mot de passe", + "new_password_label": "Nouveau mot de passe", + "passwords_match": "Les mots de passe correspondent.", + "passwords_no_match": "Les mots de passe ne correspondent pas", + "subtitle": "Choisissez un nouveau mot de passe pour votre compte.", + "success": { + "description": "Votre mot de passe a été mis à jour avec succès.", + "title": "Mot de passe mis à jour" + }, + "title": "Modifier votre mot de passe" + }, + "password_reset": { + "consumed": { + "subtitle": "Pour créer un nouveau mot de passe, recommencez et sélectionnez « Mot de passe oublié ».", + "title": "Le lien pour réinitialiser votre mot de passe a déjà été utilisé" + }, + "expired": { + "resend_email": "Renvoyer l’e-mail", + "subtitle": "Demander un nouvel e-mail qui sera envoyé à : {{email}}", + "title": "Le lien pour réinitialiser votre mot de passe a expiré" + }, + "subtitle": "Choisissez un nouveau mot de passe pour votre compte.", + "title": "Réinitialiser votre mot de passe" + }, + "password_strength": { + "placeholder": "Complexité du mot de passe", + "score": { + "0": "Mot de passe extrêmement faible", + "1": "Mot de passe très faible", + "2": "Mot de passe faible", + "3": "Mot de passe fort", + "4": "Mot de passe très fort" + }, + "suggestion": { + "all_uppercase": "Mettez certaines lettres en majuscule, mais pas toutes.", + "another_word": "Ajoutez des mots moins courants.", + "associated_years": "Évitez les années qui vous sont associées.", + "capitalization": "Mettez plus de majuscule que simplement la première lettre.", + "dates": "Évitez les dates et les années qui vous sont associées.", + "l33t": "Évitez les substitutions de lettres prévisibles telles que « 
@ » pour « a ».", + "longer_keyboard_pattern": "Utilisez des suites de caractères plus longues sur votre clavier, en changeant de direction plusieurs fois.", + "no_need": "Vous pouvez créer des mots de passe sécurisés sans utiliser de symboles, de chiffres ou de majuscules.", + "pwned": "Si vous utilisez ce mot de passe ailleurs, vous devriez le modifier.", + "recent_years": "Évitez les dernières années.", + "repeated": "Évitez les répétitions de mots et de caractères.", + "reverse_words": "Évitez d'utiliser des mots courants inversés.", + "sequences": "Évitez les séquences de caractères courantes.", + "use_words": "Utilisez plusieurs mots, mais évitez les expressions courantes." + }, + "too_weak": "Ce mot de passe est trop faible", + "warning": { + "common": "Il s’agit d’un mot de passe couramment utilisé.", + "common_names": "Les noms et prénoms communs sont faciles à deviner.", + "dates": "Les dates sont faciles à deviner.", + "extended_repeat": "Les caractères répétés comme \"abcabcabc\" sont faciles à deviner.", + "key_pattern": "Les suites de touches de clavier courtes sont faciles à deviner.", + "names_by_themselves": "Les noms et prénoms seuls sont faciles à deviner.", + "pwned": "Votre mot de passe a été exposé lors d'une fuite de données sur Internet.", + "recent_years": "Les dernières années sont faciles à deviner.", + "sequences": "Les séquences de caractères courantes comme « abc » sont faciles à deviner.", + "similar_to_common": "Ce mot de passe est similaire à un mot de passe couramment utilisé.", + "simple_repeat": "Les caractères répétés tels que « aaa » sont faciles à deviner.", + "straight_row": "Les suites de touches droites de votre clavier sont faciles à deviner.", + "top_hundred": "Il s'agit d'un mot de passe fréquemment utilisé.", + "top_ten": "Il s'agit d'un mot de passe très utilisé.", + "user_inputs": "Votre mot de passe ne doit pas contenir de données personnelles ou relatives à la page.", + "word_by_itself": "Les mots seuls sont faciles 
à deviner." + } + }, + "reset_cross_signing": { + "button": "Réinitialiser l'identité", + "cancelled": { + "description_1": "Vous pouvez fermer cette fenêtre et revenir à l'application pour continuer.", + "description_2": "Si vous êtes déconnecté partout et que vous ne vous souvenez pas de votre code de récupération, vous devrez tout de même réinitialiser votre identité.", + "heading": "La réinitialisation d'identité a été annulée." + }, + "description": "Si vous n'êtes connecté à aucun autre appareil et que vous avez perdu votre clé de récupération, vous devrez réinitialiser votre identité pour continuer à utiliser l'application.", + "effect_list": { + "negative_1": "Vous perdrez votre historique de messages existant", + "negative_2": "Vous devrez vérifier à nouveau tous vos appareils et contacts existants", + "neutral_1": "Vous perdrez l’historique de vos messages", + "neutral_2": "Vous devrez vérifier à nouveau tous vos appareils et contacts existants", + "positive_1": "Les détails de votre compte, vos contacts, vos préférences et votre liste de discussion seront conservés" + }, + "failure": { + "description": "Il s'agit peut-être d'un problème temporaire, veuillez donc réessayer ultérieurement. Si le problème persiste, contactez l'administrateur de votre serveur.", + "heading": "Impossible d'autoriser la réinitialisation de l'identité cryptographique", + "title": "Impossible d'autoriser le remplacement de l'identité cryptographique" + }, + "finish_reset": "Terminer la réinitialisation", + "heading": "Réinitialisez votre identité au cas où vous ne pourriez pas la vérifier autrement", + "start_reset": "Démarrer la réinitialisation", + "success": { + "description": "La réinitialisation de l'identité a été approuvée pour les prochaines {{minutes}} minutes. Vous pouvez fermer cette fenêtre et revenir à l'application pour continuer.", + "heading": "Réinitialisation de l'identité réussie. 
Retournez dans l'application pour terminer le processus.", + "title": "Réinitialisation de l'identité cryptographique temporairement autorisée" + }, + "warning": "Ne réinitialisez votre identité que si vous n'avez accès à aucun autre appareil connecté et que vous avez perdu votre clé de récupération." + }, + "selectable_session": { + "label": "Sélectionner une session" + }, + "session": { + "client_id_label": "Client ID", + "current": "Actuel", + "current_badge": "Actuel", + "device_id_label": "ID de l’appareil", + "finished_date": "Terminé ", + "finished_label": "Terminée", + "generic_browser_session": "Session de navigateur", + "id_label": "ID", + "ip_label": "Adresse IP", + "last_active_label": "Dernière activité", + "last_auth_label": "Dernière authentification", + "name_for_platform": "{{name}} pour {{platform}}", + "scopes_label": "Scopes", + "set_device_name": { + "help": "Définissez un nom qui vous aidera à identifier cet appareil.", + "label": "Nom de l'appareil", + "title": "Modifier le nom de l'appareil" + }, + "signed_in_date": "Connecté ", + "signed_in_label": "Connecté", + "title": "Détails de la session", + "unknown_browser": "Navigateur inconnu", + "unknown_device": "Appareil inconnu", + "uri_label": "Uri", + "user_id_label": "Identifiant", + "username_label": "Nom d'utilisateur" + }, + "session_detail": { + "alert": { + "button": "Revenir en arrière", + "text": "Cette session n'existe pas ou n'est plus active.", + "title": "Impossible de trouver la session : {{deviceId}}" + } + }, + "unknown_route": "Route inconnue {{route}}", + "unverified_email_alert": { + "button": "Examiner et confirmer", + "text:one": "Vous avez {{count}} adresse e-mail non vérifiée.", + "text:other": "Vous avez {{count}} adresses e-mail non vérifiées.", + "title": "Adresse e-mail non vérifiée" + }, + "user_email": { + "cant_delete_primary": "Changez d'adresse e-mail principale pour supprimer celle-ci.", + "delete_button_confirmation_modal": { + "action": "Supprimer l'adresse 
e-mail", + "body": "Supprimer cette adresse e-mail ?", + "incorrect_password": "Mot de passe incorrect, veuillez réessayer", + "password_confirmation": "Confirmez le mot de passe de votre compte pour supprimer cette adresse e-mail" + }, + "delete_button_title": "Supprimer l’adresse e-mail", + "email": "Adresse e-mail", + "make_primary_button": "Utiliser comme adresse principale", + "not_verified": "Non vérifié", + "primary_email": "Adresse e-mail principale", + "retry_button": "Renvoyer le code", + "unverified": "Non vérifiée" + }, + "user_email_list": { + "heading": "Adresses e-mail", + "no_primary_email_alert": "Aucune adresse e-mail principale" + }, + "user_greeting": { + "error": "Impossible de charger l'utilisateur" + }, + "user_name": { + "display_name_field_label": "Pseudonyme" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} session active", + "active_sessions:other": "{{count}} sessions actives", + "heading": "Où vous êtes connecté", + "no_active_sessions": { + "default": "Vous n'êtes connecté à aucune application.", + "inactive_90_days": "Toutes vos sessions ont été actives au cours des 90 derniers jours." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Le code a expiré. 
Veuillez demander un nouveau code.", + "title": "Code expiré" + }, + "code_field_error": "Code non reconnu", + "code_field_label": "Code à 6 chiffres", + "code_field_wrong_shape": "Le code doit être composé de 6 chiffres", + "email_sent_alert": { + "description": "Saisissez le nouveau code ci-dessous.", + "title": "Nouveau code envoyé" + }, + "enter_code_prompt": "Entrez le code à 6 chiffres envoyé à : {{email}}", + "heading": "Vérifiez votre adresse e-mail", + "invalid_code_alert": { + "description": "Vérifiez le code envoyé à votre adresse e-mail et entrez le dans le champ ci-dessous pour continuer.", + "title": "Vous avez saisi un code erroné" + }, + "resend_code": "Renvoyer le code", + "resend_email": "Renvoyer l’e-mail", + "sent": "Envoyé !", + "unknown_email": "Adresse e-mail inconnue" + } + }, + "mas": { + "scope": { + "edit_profile": "Modifier votre profil et vos coordonnées", + "manage_sessions": "Gérer vos appareils et vos sessions", + "mas_admin": "Administrer les utilisateurs (urn:mas:admin)", + "send_messages": "Envoyez de nouveaux messages en votre nom", + "synapse_admin": "Administrer le serveur (urn:synapse:admin:*)", + "view_messages": "Afficher vos messages et données existants", + "view_profile": "Voir les informations de votre profil et vos coordonnées" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/hu.json b/matrix-authentication-service/frontend/locales/hu.json new file mode 100644 index 00000000..888002d9 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/hu.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Vissza", + "cancel": "Mégse", + "clear": "Törlés", + "close": "Bezárás", + "collapse": "Összecsukás", + "confirm": "Megerősítés", + "continue": "Folytatás", + "edit": "Szerkesztés", + "expand": "Kibontás", + "save": "Mentés", + "save_and_continue": "Mentés és folytatás", + "sign_out": "Kijelentkezés", + "start_over": "Újrakezdés" + }, + "branding": { + "privacy_policy": { 
+ "alt": "Hivatkozás a szolgáltatás adatvédelmi irányelveire", + "link": "Adatvédelmi irányelvek" + }, + "terms_and_conditions": { + "alt": "Hivatkozás a szolgáltatási feltételekre", + "link": "Szolgáltatási feltételek" + } + }, + "common": { + "add": "Hozzáadás", + "e2ee": "Végpontok közti titkosítás", + "error": "Hiba", + "loading": "Betöltés…", + "next": "Következő", + "password": "Jelszó", + "previous": "Előző", + "saved": "Mentve", + "saving": "Mentés…" + }, + "frontend": { + "account": { + "account_password": "Fiók jelszava", + "contact_info": "Kapcsolati információ", + "delete_account": { + "alert_description": "Ez a fiók véglegesen törölve lesz, és többé nem fog hozzáférni az üzeneteihez.", + "alert_title": "Hamarosan elveszíti az összes adatát", + "button": "Fiók törlése", + "dialog_description": "Erősítse meg, hogy törölné a fiókját:\n\n\nNem fogja tudni újraaktiválni a fiókját\nTöbbé nem fog tudni bejelentkezni\nSenki sem fogja tudni használni a felhasználónevét (MXID), Önt is beleértve\nElhagyja az összes szobáját és a közvetlen üzeneteit\nEl lesz távolítva az azonosítási kiszolgálóról, és senki sem fogja tudni megtalálni az e-mail-címe vagy telefonszáma alapján\n\nA régi üzenetei továbbra is láthatóak lesznek azok számára, akik megkapták azokat. 
Elrejti az elküldött üzeneteit azok elől, akik a jövőben csatlakoznak a szobákhoz?", + "dialog_title": "Törli ezt a fiókot?", + "erase_checkbox_label": "Igen, az összes üzenet elrejtése az új érkezők elől", + "incorrect_password": "Helytelen jelszó, próbálja újra", + "mxid_label": "Erősítse meg a Matrix-azonosítóját ({{ mxid }})", + "mxid_mismatch": "Ez az érték nem egyezik a Matrix-azonosítójával", + "password_label": "A folytatáshoz adja meg a jelszavát" + }, + "edit_profile": { + "display_name_help": "Ez az, amit mások látni fognak, ha be van jelentkezve.", + "display_name_label": "Megjelenítési név", + "title": "Profil szerkesztése", + "username_label": "Felhasználói név" + }, + "password": { + "change": "Jelszó módosítása", + "change_disabled": "A jelszóváltoztatást letiltotta a rendszergazda.", + "label": "Jelszó" + }, + "sign_out": { + "button": "Kijelentkezés a fiókból", + "dialog": "Kijelentkezik ebből a fiókból?" + }, + "title": "Saját fiók" + }, + "add_email_form": { + "email_denied_alert": { + "text": "A megadott e-mail-címet nem engedélyezi a kiszolgáló házirendje.", + "title": "E-mail-cím házirend alapján elutasítva" + }, + "email_denied_error": "A megadott e-mail-címet nem engedélyezi a kiszolgáló házirendje", + "email_exists_alert": { + "text": "A megadott e-mail-cím már hozzá lett adva ehhez a fiókhoz", + "title": "Az e-mail-cím már létezik" + }, + "email_exists_error": "A megadott e-mail-cím már hozzá lett adva ehhez a fiókhoz", + "email_field_help": "Alternatív e-mail-cím hozzáadása, mellyel hozzáférhet ehhez a fiókhoz.", + "email_field_label": "E-mail-cím hozzáadása", + "email_in_use_error": "A megadott e-mail-cím már használatban van", + "email_invalid_alert": { + "text": "A megadott e-mail-cím érvénytelen", + "title": "Érvénytelen e-mail-cím" + }, + "email_invalid_error": "A megadott e-mail-cím érvénytelen", + "incorrect_password_error": "Helytelen jelszó, próbálja újra", + "password_confirmation": "Erősítse meg a fiókja jelszavát az 
e-mail-cím hozzáadásához" + }, + "app_sessions_list": { + "error": "Az alkalmazás munkameneteinek betöltése sikertelen", + "heading": "Alkalmazások" + }, + "browser_session_details": { + "current_badge": "Jelenlegi", + "session_details_title": "Munkamenet" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktív munkamenet", + "body:other": "{{count}} aktív munkamenet", + "heading": "Böngészők", + "no_active_sessions": { + "default": "Még egyetlen webböngészőben sem jelentkezett be.", + "inactive_90_days": "Az összes munkamenete aktív volt az elmúlt 90 napban." + }, + "view_all_button": "Összes megtekintése" + }, + "compat_session_detail": { + "client_details_title": "Kliensinformációk", + "name": "Név", + "session_details_title": "Munkamenet" + }, + "device_type_icon_label": { + "desktop": "Asztali számítógép", + "mobile": "Mobil", + "pc": "Számítógép", + "tablet": "Táblagép", + "unknown": "Ismeretlen eszköztípus", + "web": "Web" + }, + "email_in_use": { + "heading": "A(z) {{email}} e-mail-cím már használatban van." + }, + "end_session_button": { + "confirmation_modal_title": "Biztos, hogy befejezi a munkamenetet?", + "text": "Eszköz eltávolítása" + }, + "error": { + "hideDetails": "Részletek elrejtése", + "showDetails": "Részletek megjelenítése", + "subtitle": "Váratlan hiba történt. Próbálja újra.", + "title": "Valamilyen hiba történt" + }, + "error_boundary_title": "Valamilyen hiba történt", + "errors": { + "field_required": "Ez a mező kötelező", + "rate_limit_exceeded": "Túl sok kérést adott fel egy rövid időszak alatt. Várjon néhány percet, és próbálja újra." 
+ }, + "last_active": { + "active_date": "{{relativeDate}} aktív", + "active_now": "Jelenleg aktív", + "inactive_90_days": "90+ napja inaktív" + }, + "nav": { + "devices": "Eszközök", + "plan": "Terv", + "profile": "Profil", + "sessions": "Munkamenetek", + "settings": "Beállítások" + }, + "not_found_alert_title": "Nem található.", + "not_logged_in_alert": "Nincs bejelentkezve.", + "oauth2_client_detail": { + "details_title": "Kliensinformációk", + "id": "Kliensazonosító", + "name": "Név", + "policy": "Házirend", + "terms": "Szolgáltatás feltételei" + }, + "oauth2_session_detail": { + "client_details_name": "Név", + "client_title": "Kliensinformációk", + "session_details_title": "Munkamenet" + }, + "pagination_controls": { + "total": "Összesen: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Jelenlegi jelszó", + "failure": { + "description": { + "account_locked": "A fiókja zárolva van, és jelenleg nem állítható helyre. Ha erre nem számított, akkor lépjen kapcsolatba a kiszolgáló rendszergazdájával.", + "expired_recovery_ticket": "A helyreállítási hivatkozás lejárt. Kezdje az elejéről a fiókja helyreállítási folyamatát.", + "invalid_new_password": "A választott új jelszó érvénytelen; lehet, hogy nem felel meg a beállított biztonsági házirendnek.", + "no_current_password": "Nincs jelenlegi jelszava.", + "no_such_recovery_ticket": "A helyreállítási hivatkozás érvénytelen. Ha a helyreállítási üzenetből másolta ki a hivatkozást, akkor ellenőrizze, hogy a teljes hivatkozást átmásolta-e.", + "password_changes_disabled": "A jelszómódosítás le van tiltva.", + "recovery_ticket_already_used": "A helyreállítási hivatkozás már fel lett használva. Többé nem használható.", + "unspecified": "Ez ideiglenes probléma is lehet, így próbálja újra később. Ha a probléma továbbra is fennáll, lépjen kapcsolatba a kiszolgáló rendszergazdájával.", + "wrong_password": "A jelenlegi jelszóként megadott jelszó helytelen. Próbálja újra." 
+ }, + "title": "A jelszó frissítése sikertelen" + }, + "new_password_again_label": "Adja meg a jelszót újból", + "new_password_label": "Új jelszó", + "passwords_match": "A jelszavak megegyeznek!", + "passwords_no_match": "A jelszavak nem egyeznek", + "subtitle": "Válasszon egy új jelszót a fiókjához.", + "success": { + "description": "A jelszava sikeresen frissült.", + "title": "Jelszó frissítve" + }, + "title": "Jelszó módosítása" + }, + "password_reset": { + "consumed": { + "subtitle": "Új jelszó létrehozásához kezdje elölről, és válassza az „Elfelejtett jelszó” lehetőséget.", + "title": "A jelszó-visszaállítási hivatkozás már fel lett használva" + }, + "expired": { + "resend_email": "Levél újraküldése", + "subtitle": "Új levél kérése, amely ide lesz elküldve: {{email}}", + "title": "A jelszó-visszaállítási hivatkozás lejárt" + }, + "subtitle": "Válasszon egy új jelszót a fiókjához.", + "title": "Jelszó visszaállítása" + }, + "password_strength": { + "placeholder": "Jelszó erőssége", + "score": { + "0": "Rendkívül gyenge jelszó", + "1": "Nagyon gyenge jelszó", + "2": "Gyenge jelszó", + "3": "Erős jelszó", + "4": "Nagyon erős jelszó" + }, + "suggestion": { + "all_uppercase": "Használjon nagybetűket, de nem mindnél.", + "another_word": "Adjon hozzá néhány kevésbé gyakori szót.", + "associated_years": "Kerülje az Önhöz köthető éveket.", + "capitalization": "Ne csak az első betű legyen nagybetűs.", + "dates": "Kerülje az Önhöz köthető dátumokat és éveket.", + "l33t": "Kerülje a kiszámítható betűhelyettesítéseket, mint az „a” helyetti „@”.", + "longer_keyboard_pattern": "Használjon hosszabb billentyűzetmintát, és többször módosítsa a gépelési irányt.", + "no_need": "Anélkül is hozhat létre erős jelszavakat, hogy szimbólumokat, számokat vagy nagybetűket használna.", + "pwned": "Ha máshol is használja ezt a jelszót, akkor változtassa meg.", + "recent_years": "Kerülje a közelmúltbeli éveket.", + "repeated": "Kerülje az ismétlődő szavakat és karaktereket.", + 
"reverse_words": "Kerülje a gyakori szavak fordított betűzését.", + "sequences": "Kerülje a gyakori karaktersorozatokat.", + "use_words": "Használjon több szót, de kerülje a gyakori kifejezéseket." + }, + "too_weak": "Ez a jelszó túl gyenge", + "warning": { + "common": "Ez egy gyakran használt jelszó.", + "common_names": "A gyakori nevek és vezetéknevek könnyen kitalálhatóak.", + "dates": "A dátumok könnyen kitalálhatóak.", + "extended_repeat": "Az ismétlődő karaktersorozatok, mint az „abcabcabc” könnyen kitalálhatóak.", + "key_pattern": "A rövid billentyűzetminták könnyen kitalálhatóak.", + "names_by_themselves": "Az egy nevet vagy vezetéknevet tartalmazó jelszók könnyen kitalálhatók.", + "pwned": "A jelszava egy adatvédelmi incidensben kikerült az internetre.", + "recent_years": "A közelmúltbeli évek könnyen kitalálhatóak.", + "sequences": "A gyakori karaktersorozatok, mint az „abc”, könnyen kitalálhatóak.", + "similar_to_common": "Ez hasonlít egy gyakran használt jelszóhoz.", + "simple_repeat": "Az ismétlődő karakterek, mint az „aaa” könnyen kitalálhatóak.", + "straight_row": "A billentyűzeten szereplő karaktersorok könnyen kitalálhatóak.", + "top_hundred": "Ez egy gyakran használt jelszó.", + "top_ten": "Ez egy nagyon gyakran használt jelszó.", + "user_inputs": "Ne legyen benne személyes, vagy az oldallal kapcsolatos adat.", + "word_by_itself": "Az egy szavas jelszavak könnyen kitalálhatóak." + } + }, + "reset_cross_signing": { + "button": "Személyazonosság alaphelyzetbe állítása", + "cancelled": { + "description_1": "A folytatáshoz bezárhatja ezt az ablakot, és visszatérhet az alkalmazáshoz.", + "description_2": "Ha mindenhonnan kijelentkezett, és nem emlékszik a helyreállítási kódjára, akkor alaphelyzetbe kell állítania a személyazonosságát.", + "heading": "Személyazonosság alaphelyzetbe állítása megszakítva." 
+ }, + "description": "Ha nincs bejelentkezve egyetlen más eszközön sem, és elvesztette a helyreállítási kulcsát, akkor az alkalmazás használatának folytatásához alaphelyzetbe kell állítania a személyazonosságát.", + "effect_list": { + "negative_1": "El fogja veszíteni a meglévő üzenetelőzményeit", + "negative_2": "Újból ellenőriznie kell az összes meglévő eszközét és kapcsolatát", + "neutral_1": "El fogja veszíteni a csak a kiszolgálón tárolt üzenetelőzményeit", + "neutral_2": "Újból ellenőriznie kell az összes meglévő eszközét és kapcsolatát", + "positive_1": "A fiókja részletei, a névjegyei, a beállításai és a csevegéslistája meg lesz tartva" + }, + "failure": { + "description": "Ez ideiglenes probléma is lehet, így próbálja újra később. Ha a probléma továbbra is fennáll, lépjen kapcsolatba a kiszolgáló rendszergazdájával.", + "heading": "A kriptográfiai személyazonossága alaphelyzetbe állításának engedélyezése sikertelen", + "title": "A kriptográfiai személyazonosságának engedélyezése sikertelen" + }, + "finish_reset": "Alaphelyzetbe állítás befejezése", + "heading": "Állítsa alaphelyzetbe a személyazonosságát, ha semmilyen más módon nem tudja megerősíteni", + "start_reset": "Alaphelyzetbe állítás elkezdése", + "success": { + "description": "A személyazonosság alaphelyzetbe állítása engedélyezve a következő {{minutes}} percre. A folytatáshoz bezárhatja azt az ablakot, és visszatérhet az alkalmazáshoz.", + "heading": "Személyazonosság sikeresen alaphelyzetbe állítva. A folyamat befejezéséhez térjen vissza az alkalmazáshoz.", + "title": "A kriptográfiai személyazonossága alaphelyzetbe állítása ideiglenesen engedélyezve" + }, + "warning": "Csak akkor állítsa alaphelyzetbe a személyazonosságát, ha nem fér hozzá más bejelentkezett eszközhöz, és elveszítette a helyreállítási kulcsát." 
+ }, + "selectable_session": { + "label": "Munkamenet kiválasztása" + }, + "session": { + "client_id_label": "Kliensazonosító", + "current": "Jelenlegi", + "current_badge": "Jelenlegi", + "device_id_label": "Eszközazonosító", + "finished_date": "Befejezve: ", + "finished_label": "Befejezve", + "generic_browser_session": "Böngésző-munkamenet", + "id_label": "Azonosító", + "ip_label": "IP-cím", + "last_active_label": "Legutóbb aktív", + "last_auth_label": "Legutóbbi hitelesítés", + "name_for_platform": "{{name}} erre: {{platform}}", + "scopes_label": "Hatókörök", + "set_device_name": { + "help": "Adjon meg egy nevet, amely segít az eszköz azonosításában.", + "label": "Eszköz neve", + "title": "Eszköz nevének szerkesztése" + }, + "signed_in_date": "Bejelentkezve: ", + "signed_in_label": "Bejelentkezve", + "title": "Eszköz részletei", + "unknown_browser": "Ismeretlen böngésző", + "unknown_device": "Ismeretlen eszköz", + "uri_label": "URI", + "user_id_label": "Felhasználóazonosító", + "username_label": "Felhasználónév" + }, + "session_detail": { + "alert": { + "button": "Vissza", + "text": "Ez a munkamenet nem létezik, vagy már nem aktív.", + "title": "A munkamenet nem található: {{deviceId}}" + } + }, + "unknown_route": "Ismeretlen útvonal: {{route}}", + "unverified_email_alert": { + "button": "Áttekintés és ellenőrzés", + "text:one": "{{count}} nem ellenőrzött e-mail-címe van.", + "text:other": "{{count}} nem ellenőrzött e-mail-címe van.", + "title": "Nem megerősített e-mail-cím" + }, + "user_email": { + "cant_delete_primary": "Válasszon egy másik elsődleges e-mail-címet, hogy törölhesse ezt.", + "delete_button_confirmation_modal": { + "action": "E-mail-cím törlése", + "body": "Törli ezt az e-mail-címet?", + "incorrect_password": "Helytelen jelszó, próbálja újra", + "password_confirmation": "Erősítse meg a fiókja jelszavát az e-mail-cím törléséhez" + }, + "delete_button_title": "E-mail-cím eltávolítása", + "email": "E-mail", + "make_primary_button": "Elsődlegessé 
tétel", + "not_verified": "Nincs ellenőrizve", + "primary_email": "Elsődleges e-mail-cím", + "retry_button": "Kód újraküldése", + "unverified": "Ellenőrizetlen" + }, + "user_email_list": { + "heading": "Levelek", + "no_primary_email_alert": "Nincs elsődleges e-mail-cím" + }, + "user_greeting": { + "error": "A felhasználó betöltése sikertelen" + }, + "user_name": { + "display_name_field_label": "Megjelenítési név" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktív munkamenet", + "active_sessions:other": "{{count}} aktív munkamenet", + "heading": "Hol jelentkezett be", + "no_active_sessions": { + "default": "Egyetlen alkalmazásba sincs bejelentkezve.", + "inactive_90_days": "Az összes munkamenete aktív volt az elmúlt 90 napban." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "A kód lejárt. Kérjen egy újat.", + "title": "A kód lejárt" + }, + "code_field_error": "A kód nem ismerhető fel", + "code_field_label": "6 számjegyű kód", + "code_field_wrong_shape": "A kódnak 6 számjegyűnek kell lennie", + "email_sent_alert": { + "description": "Adja meg alább az új kódot.", + "title": "Új kód küldve" + }, + "enter_code_prompt": "Adja meg az ide küldött 6 számjegyű kódot: {{email}}", + "heading": "E-mail-cím ellenőrzése", + "invalid_code_alert": { + "description": "A folytatáshoz ellenőrizze az e-mail-címére küldött kódot, és frissítse a lenti mezőket.", + "title": "Hibás kódot adott meg" + }, + "resend_code": "Kód újraküldése", + "resend_email": "Levél újraküldése", + "sent": "Elküldve!", + "unknown_email": "Ismeretlen e-mail-cím" + } + }, + "mas": { + "scope": { + "edit_profile": "Profil és elérhetőségek szerkesztése", + "manage_sessions": "Eszközök és munkamenetek kezelése", + "mas_admin": "Felhasználók kezelése (urn:mas:admin)", + "send_messages": "Új üzenetek küldése az Ön nevében", + "synapse_admin": "A kiszolgáló kezelése (urn:synapse:admin:*)", + "view_messages": "Meglévő üzenetek és adatok megtekintése", + 
"view_profile": "Saját profilinformációk és kapcsolati részletek megtekintése" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/nb-NO.json b/matrix-authentication-service/frontend/locales/nb-NO.json new file mode 100644 index 00000000..3835e0fa --- /dev/null +++ b/matrix-authentication-service/frontend/locales/nb-NO.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Tilbake", + "cancel": "Avbryt", + "clear": "Tøm", + "close": "Lukk", + "collapse": "Skjul", + "confirm": "Bekreft", + "continue": "Fortsett", + "edit": "Rediger", + "expand": "Utvid", + "save": "Lagre", + "save_and_continue": "Lagre og fortsett", + "sign_out": "Logg ut", + "start_over": "Begynn på nytt" + }, + "branding": { + "privacy_policy": { + "alt": "Lenke til tjenestens personvernerklæring", + "link": "Personvernerklæring" + }, + "terms_and_conditions": { + "alt": "Lenke til tjenestens vilkår og betingelser", + "link": "Vilkår og betingelser" + } + }, + "common": { + "add": "Legg til", + "e2ee": "Ende-til-ende-kryptering", + "error": "Feil", + "loading": "Laster inn...", + "next": "Neste", + "password": "Passord", + "previous": "Forrige", + "saved": "Lagret", + "saving": "Lagrer…" + }, + "frontend": { + "account": { + "account_password": "Passord for konto", + "contact_info": "Kontaktopplysninger", + "delete_account": { + "alert_description": "Denne kontoen vil bli slettet permanent, og du vil ikke lenger ha tilgang til noen av meldingene dine.", + "alert_title": "Du er i ferd med å miste alle dataene dine", + "button": "Slett konto", + "dialog_description": "Bekreft at du ønsker å slette kontoen din:\n\n\nDu vil ikke kunne reaktivere kontoen din\nDu vil ikke lenger kunne logge inn\nIngen vil kunne gjenbruke brukernavnet ditt (MXID), inkludert deg selv\nDu vil forlate alle rom og direktemeldinger du er i\nDu vil bli fjernet fra identitetsserveren, og ingen vil kunne finne deg med e-postadressen eller telefonnummeret ditt\n\nDine gamle meldinger vil 
fortsatt være synlige for personer som har mottatt dem. Vil du skjule dine sendte meldinger for personer som blir med i rommene i fremtiden?", + "dialog_title": "Slett denne kontoen?", + "erase_checkbox_label": "Ja, skjul alle meldingene mine for nye medlemmer", + "incorrect_password": "Feil passord, prøv igjen", + "mxid_label": "Bekreft din Matrix ID ({{ mxid }})", + "mxid_mismatch": "Denne verdien samsvarer ikke med din Matrix ID", + "password_label": "Skriv inn passordet ditt for å fortsette" + }, + "edit_profile": { + "display_name_help": "Dette er det andre vil se uansett hvor du er logget inn.", + "display_name_label": "Visningsnavn", + "title": "Rediger profil", + "username_label": "Brukernavn" + }, + "password": { + "change": "Endre passord", + "change_disabled": "Endring av passord er deaktivert av administrator.", + "label": "Passord" + }, + "sign_out": { + "button": "Logg av konto", + "dialog": "Logg ut av denne kontoen?" + }, + "title": "Din konto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Den angitte e-postadressen er ikke tillatt av serverpolicyen.", + "title": "E-post avvist av policy" + }, + "email_denied_error": "Den angitte e-postadressen er ikke tillatt av serverpolicyen", + "email_exists_alert": { + "text": "Den angitte e-postadressen er allerede lagt til denne kontoen", + "title": "E-posten finnes allerede" + }, + "email_exists_error": "Den angitte e-postadressen er allerede lagt til denne kontoen", + "email_field_help": "Legg til en alternativ e-postadresse du kan bruke for å få tilgang til denne kontoen.", + "email_field_label": "Legg til e-post", + "email_in_use_error": "Den angitte e-postadressen er allerede i bruk", + "email_invalid_alert": { + "text": "Den angitte e-postadressen er ugyldig", + "title": "Ugyldig e-post" + }, + "email_invalid_error": "Den angitte e-postadressen er ugyldig", + "incorrect_password_error": "Feil passord, prøv igjen", + "password_confirmation": "Bekreft passordet ditt for å legge til denne 
e-postadressen" + }, + "app_sessions_list": { + "error": "Kunne ikke laste inn appsesjoner", + "heading": "Applikasjoner" + }, + "browser_session_details": { + "current_badge": "Nåværende", + "session_details_title": "Sesjon" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktiv sesjon", + "body:other": "{{count}} aktive sesjoner", + "heading": "Nettlesere", + "no_active_sessions": { + "default": "Du er ikke logget inn på noen nettlesere.", + "inactive_90_days": "Alle sesjonene dine har vært aktive de siste 90 dagene." + }, + "view_all_button": "Vis alle" + }, + "compat_session_detail": { + "client_details_title": "Klient informasjon", + "name": "Navn", + "session_details_title": "Sesjon" + }, + "device_type_icon_label": { + "desktop": "Skrivebord", + "mobile": "Mobil", + "pc": "Datamaskin", + "tablet": "Nettbrett", + "unknown": "Ukjent enhetstype", + "web": "Web" + }, + "email_in_use": { + "heading": "E-postadressen {{email}} er allerede i bruk." + }, + "end_session_button": { + "confirmation_modal_title": "Er du sikker på at du vil avslutte denne sesjonen?", + "text": "Fjern enheten" + }, + "error": { + "hideDetails": "Skjul detaljer", + "showDetails": "Vis detaljer", + "subtitle": "Det oppstod en uventet feil. Vennligst prøv igjen.", + "title": "Noe gikk galt" + }, + "error_boundary_title": "Noe gikk galt", + "errors": { + "field_required": "Dette feltet er obligatorisk", + "rate_limit_exceeded": "Du har kommet med for mange forespørsler på kort tid. Vent noen minutter og prøv igjen." 
+ }, + "last_active": { + "active_date": "Aktiv {{relativeDate}}", + "active_now": "Aktiv nå", + "inactive_90_days": "Inaktiv i 90+ dager" + }, + "nav": { + "devices": "Enheter", + "plan": "Plan", + "profile": "Profil", + "sessions": "Sesjoner", + "settings": "Innstillinger" + }, + "not_found_alert_title": "Ikke funnet.", + "not_logged_in_alert": "Du er ikke innlogget.", + "oauth2_client_detail": { + "details_title": "Klientinformasjon", + "id": "Klient-ID", + "name": "Navn", + "policy": "Retningslinjer", + "terms": "Vilkår for bruk" + }, + "oauth2_session_detail": { + "client_details_name": "Navn", + "client_title": "Klientinformasjon", + "session_details_title": "Sesjon" + }, + "pagination_controls": { + "total": "Totalt: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Nåværende passord", + "failure": { + "description": { + "account_locked": "Kontoen din er låst og kan ikke gjenopprettes på dette tidspunktet. Hvis dette ikke er forventet, kan du kontakte serveradministratoren din.", + "expired_recovery_ticket": "Gjenopprettingslenken er utløpt. Start kontogjenopprettingsprosessen på nytt.", + "invalid_new_password": "Det nye passordet du valgte er ugyldig. Det kan hende at det ikke oppfyller den gjeldende sikkerhetspolicyen.", + "no_current_password": "Du har ikke et gjeldende passord.", + "no_such_recovery_ticket": "Gjenopprettingslenken er ugyldig. Hvis du kopierte lenken fra gjenopprettingseposten, vennligst sjekk at hele lenken ble kopiert.", + "password_changes_disabled": "Endring av passord er deaktivert.", + "recovery_ticket_already_used": "Gjenopprettingslenken er allerede brukt. Den kan ikke brukes igjen.", + "unspecified": "Dette kan være et midlertidig problem, så prøv igjen senere. Hvis problemet vedvarer, vennligst kontakt serveradministratoren din.", + "wrong_password": "Passordet du oppga som ditt nåværende passord er feil. Prøv igjen." 
+ }, + "title": "Kunne ikke oppdatere passordet" + }, + "new_password_again_label": "Skriv inn nytt passord igjen", + "new_password_label": "Nytt passord", + "passwords_match": "Passordene stemmer overens!", + "passwords_no_match": "Passord stemmer ikke overens", + "subtitle": "Velg et nytt passord for kontoen din.", + "success": { + "description": "Passordet ditt har blitt oppdatert.", + "title": "Passord oppdatert" + }, + "title": "Bytt passordet ditt" + }, + "password_reset": { + "consumed": { + "subtitle": "For å opprette et nytt passord, start på nytt og velg «Glemt passord».", + "title": "Lenken for å tilbakestille passordet ditt har allerede blitt brukt" + }, + "expired": { + "resend_email": "Send e-post på nytt", + "subtitle": "Be om en ny e-post som vil bli sendt til: {{email}}", + "title": "Lenken for å tilbakestille passordet ditt er utløpt" + }, + "subtitle": "Velg et nytt passord for kontoen din.", + "title": "Tilbakestill passordet ditt" + }, + "password_strength": { + "placeholder": "Passordstyrke", + "score": { + "0": "Ekstremt svakt passord", + "1": "Veldig svakt passord", + "2": "Svakt passord", + "3": "Sterkt passord", + "4": "Veldig sterkt passord" + }, + "suggestion": { + "all_uppercase": "Bruk store bokstaver, men ikke for alle bokstaver.", + "another_word": "Legg til flere ord som er mindre vanlige.", + "associated_years": "Unngå år som er knyttet til deg", + "capitalization": "Bruk stor bokstav på mer enn den første bokstaven.", + "dates": "Unngå datoer og år som er knyttet til deg", + "l33t": "Unngå forutsigbare bokstavbytter som \"@\" i stedet for \"a\".", + "longer_keyboard_pattern": "Bruk lengre tastaturmønstre og endre skriveretning flere ganger.", + "no_need": "Du kan lage sterke passord uten å bruke symboler, tall eller store bokstaver.", + "pwned": "Hvis du bruker dette passordet andre steder, bør du endre det.", + "recent_years": "Unngå nylige år", + "repeated": "Unngå gjentatte ord og tegn.", + "reverse_words": "Unngå omvendt 
staving av vanlige ord.", + "sequences": "Unngå vanlige tegnsekvenser.", + "use_words": "Bruk flere ord, men unngå vanlige fraser." + }, + "too_weak": "Dette passordet er for svakt", + "warning": { + "common": "Dette er et ofte brukt passord.", + "common_names": "Vanlige navn og etternavn er lette å gjette seg til.", + "dates": "Datoer er enkle å gjette seg til.", + "extended_repeat": "Gjentatte tegnmønstre som \"abcabcabc\" er lette å gjette seg til.", + "key_pattern": "Korte tastaturmønstre er enkle å gjette.", + "names_by_themselves": "Enkeltnavn eller etternavn er lette å gjette.", + "pwned": "Passordet ditt ble eksponert ved et datainnbrudd på Internett.", + "recent_years": "De siste årene er enkle å gjette seg til.", + "sequences": "Vanlige tegnsekvenser som «abc» er enkle å gjette.", + "similar_to_common": "Dette ligner på et ofte brukt passord.", + "simple_repeat": "Gjentatte tegn som \"aaa\" er lette å gjette.", + "straight_row": "Rette tasterader på tastaturet er enkle å gjette seg til.", + "top_hundred": "Dette er et ofte brukt passord.", + "top_ten": "Dette er et mye brukt passord.", + "user_inputs": "Det skal ikke være noen personlige eller siderelaterte data.", + "word_by_itself": "Enkeltord er lette å gjette." + } + }, + "reset_cross_signing": { + "button": "Tilbakestill identitet", + "cancelled": { + "description_1": "Du kan lukke dette vinduet og gå tilbake til appen for å fortsette.", + "description_2": "Hvis du er logget av overalt og ikke husker gjenopprettingskoden, må du fortsatt tilbakestille identiteten din.", + "heading": "Tilbakestilling av identitet kansellert." 
+ }, + "description": "Hvis du ikke er logget på andre enheter, og du har mistet gjenopprettingsnøkkelen, må du tilbakestille identiteten din for å fortsette å bruke appen.", + "effect_list": { + "negative_1": "Du vil miste din eksisterende meldingshistorikk", + "negative_2": "Du må bekrefte alle eksisterende enheter og kontakter på nytt", + "neutral_1": "Du vil miste all meldingshistorikk som bare er lagret på serveren", + "neutral_2": "Du må bekrefte alle eksisterende enheter og kontakter på nytt", + "positive_1": "Dine kontodetaljer, kontakter, preferanser og chatteliste vil bli beholdt" + }, + "failure": { + "description": "Dette kan være et midlertidig problem, så prøv igjen senere. Hvis problemet vedvarer, vennligst kontakt serveradministratoren din.", + "heading": "Kunne ikke tillate tilbakestilling av kryptoidentitet", + "title": "Kunne ikke tillate kryptoidentitet" + }, + "finish_reset": "Fullfør tilbakestillingen", + "heading": "Tilbakestill identiteten din i tilfelle du ikke kan bekrefte på en annen måte", + "start_reset": "Start tilbakestilling", + "success": { + "description": "Tilbakestillingen av identiteten er godkjent for de neste {{minutes}} minuttene. Du kan lukke dette vinduet og gå tilbake til appen for å fortsette.", + "heading": "Identitet tilbakestilt. Gå tilbake til appen for å fullføre prosessen.", + "title": "Tilbakestilling av kryptoidentitet midlertidig tillatt" + }, + "warning": "Tilbakestill identiteten din bare hvis du ikke har tilgang til en annen pålogget enhet og du har mistet gjenopprettingsnøkkelen." 
+ }, + "selectable_session": { + "label": "Velg sesjon" + }, + "session": { + "client_id_label": "Klient-ID", + "current": "Nåværende", + "current_badge": "Nåværende", + "device_id_label": "Enhets-ID", + "finished_date": "Fullført ", + "finished_label": "Fullført", + "generic_browser_session": "Nettlesersesjon", + "id_label": "ID", + "ip_label": "IP-adresse", + "last_active_label": "Sist aktiv", + "last_auth_label": "Siste autentisering", + "name_for_platform": "{{name}} for {{platform}}", + "scopes_label": "Omfang", + "set_device_name": { + "help": "Angi et navn som hjelper deg med å identifisere denne enheten.", + "label": "Navn på enhet", + "title": "Rediger navnet på enheten" + }, + "signed_in_date": "Logget på ", + "signed_in_label": "Logget på", + "title": "Detaljer om enheten", + "unknown_browser": "Ukjent nettleser", + "unknown_device": "Ukjent enhet", + "uri_label": "Uri", + "user_id_label": "Bruker ID", + "username_label": "Brukernavn" + }, + "session_detail": { + "alert": { + "button": "Gå tilbake", + "text": "Denne sesjonen finnes ikke, eller er ikke lenger aktiv.", + "title": "Finner ikke sesjonen: {{deviceId}}" + } + }, + "unknown_route": "Ukjent rute {{route}}", + "unverified_email_alert": { + "button": "Gjennomgå og verifiser", + "text:one": "Du har {{count}} ubekreftet e-postadresse.", + "text:other": "Du har {{count}} ubekreftede e-postadresser.", + "title": "Ubekreftet e-post" + }, + "user_email": { + "cant_delete_primary": "Velg en annen primær e-postadresse for å slette denne.", + "delete_button_confirmation_modal": { + "action": "Slett e-post", + "body": "Vil du slette denne e-posten?", + "incorrect_password": "Feil passord, prøv igjen", + "password_confirmation": "Bekreft kontopassordet ditt for å slette denne e-postadressen" + }, + "delete_button_title": "Fjern e-postadresse", + "email": "E-post", + "make_primary_button": "Gjøre til primær", + "not_verified": "Ikke verifisert", + "primary_email": "Primær e-postadresse", + "retry_button": 
"Send kode på nytt", + "unverified": "Ikke verifisert" + }, + "user_email_list": { + "heading": "E-poster", + "no_primary_email_alert": "Ingen primær e-postadresse" + }, + "user_greeting": { + "error": "Kunne ikke laste inn bruker" + }, + "user_name": { + "display_name_field_label": "Visningsnavn" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktiv sesjon", + "active_sessions:other": "{{count}} aktive sesjoner", + "heading": "Hvor du er logget inn", + "no_active_sessions": { + "default": "Du er ikke logget på noen applikasjoner.", + "inactive_90_days": "Alle sesjonene dine har vært aktive de siste 90 dagene." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Koden er utløpt. Be om en ny kode.", + "title": "Koden er utløpt" + }, + "code_field_error": "Kode ikke gjenkjent", + "code_field_label": "6-sifret kode", + "code_field_wrong_shape": "Koden må være 6 sifre", + "email_sent_alert": { + "description": "Skriv inn den nye koden nedenfor.", + "title": "Ny kode sendt" + }, + "enter_code_prompt": "Skriv inn den 6-sifrede koden sendt til: {{email}}", + "heading": "Bekreft e-postadressen din", + "invalid_code_alert": { + "description": "Sjekk koden som er sendt til e-posten din, og oppdater feltene nedenfor for å fortsette.", + "title": "Du skrev inn feil kode" + }, + "resend_code": "Send kode på nytt", + "resend_email": "Send e-post på nytt", + "sent": "Sendt!", + "unknown_email": "Ukjent e-postadresse" + } + }, + "mas": { + "scope": { + "edit_profile": "Rediger din profil og kontaktdetaljer", + "manage_sessions": "Administrer enhetene og sesjonene dine", + "mas_admin": "Administrer brukere (urn:mas:admin)", + "send_messages": "Send nye meldinger på dine vegne", + "synapse_admin": "Administrer serveren (urn:synapse:admin:*)", + "view_messages": "Se dine eksisterende meldinger og data", + "view_profile": "Se din profilinformasjon og kontaktdetaljer" + } + } +} \ No newline at end of file diff --git 
a/matrix-authentication-service/frontend/locales/nl.json b/matrix-authentication-service/frontend/locales/nl.json new file mode 100644 index 00000000..15b643f0 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/nl.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Terug", + "cancel": "Annuleren", + "clear": "Wissen", + "close": "Sluiten", + "collapse": "Collapse", + "confirm": "Confirm", + "continue": "Doorgaan", + "edit": "Bewerken", + "expand": "Expand", + "save": "Opslaan", + "save_and_continue": "Save and continue", + "sign_out": "Uitloggen", + "start_over": "Opnieuw beginnen" + }, + "branding": { + "privacy_policy": { + "alt": "Link naar het privacybeleid van de service", + "link": "Privacybeleid" + }, + "terms_and_conditions": { + "alt": "Link naar de servicevoorwaarden", + "link": "Algemene voorwaarden" + } + }, + "common": { + "add": "Toevoegen", + "e2ee": "End-to-end-encryptie", + "error": "Fout", + "loading": "Laden...", + "next": "Volgende", + "password": "Wachtwoord", + "previous": "Vorige", + "saved": "Opgeslagen", + "saving": "Opslaan..." + }, + "frontend": { + "account": { + "account_password": "Account password", + "contact_info": "Contact info", + "delete_account": { + "alert_description": "This account will be permanently erased and you’ll no longer have access to any of your messages.", + "alert_title": "You’re about to lose all of your data", + "button": "Delete account", + "dialog_description": "Confirm that you would like to delete your account:\n\n\nYou will not be able to reactivate your account\nYou will no longer be able to sign in\nNo one will be able to reuse your username (MXID), including you\nYou will leave all rooms and direct messages you are in\nYou will be removed from the identity server, and no one will be able to find you with your email or phone number\n\nYour old messages will still be visible to people who received them. 
Would you like to hide your sent messages from people who join rooms in the future?", + "dialog_title": "Delete this account?", + "erase_checkbox_label": "Yes, hide all my messages from new joiners", + "incorrect_password": "Incorrect password, please try again", + "mxid_label": "Confirm your Matrix ID ({{ mxid }})", + "mxid_mismatch": "This value does not match your Matrix ID", + "password_label": "Enter your password to continue" + }, + "edit_profile": { + "display_name_help": "Dit is wat anderen zien wanneer je bent ingelogd.", + "display_name_label": "Weergavenaam", + "title": "Profiel bewerken", + "username_label": "Gebruikersnaam" + }, + "password": { + "change": "Wachtwoord wijzigen", + "change_disabled": "Password changes are disabled by the administrator.", + "label": "Wachtwoord" + }, + "sign_out": { + "button": "Sign out of account", + "dialog": "Sign out of this account?" + }, + "title": "Uw account" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Het ingevoerde e-mailadres is niet toegestaan volgens het serverbeleid.", + "title": "E-mailadres geweigerd door het beleid" + }, + "email_denied_error": "The entered email is not allowed by the server policy", + "email_exists_alert": { + "text": "Het ingevoerde e-mailadres is al toegevoegd aan dit account", + "title": "E-mailadres bestaat al" + }, + "email_exists_error": "Het ingevoerde e-mailadres is al toegevoegd aan dit account", + "email_field_help": "Voeg een alternatief e-mailadres toe dat u kunt gebruiken om toegang tot dit account te krijgen.", + "email_field_label": "E-mailadres toevoegen", + "email_in_use_error": "The entered email is already in use", + "email_invalid_alert": { + "text": "Het ingevoerde e-mailadres is ongeldig", + "title": "Ongeldig e-mailadres" + }, + "email_invalid_error": "Het ingevoerde e-mailadres is ongeldig", + "incorrect_password_error": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to add this email 
address" + }, + "app_sessions_list": { + "error": "Kan app-sessies niet laden", + "heading": "Apps" + }, + "browser_session_details": { + "current_badge": "Huidige", + "session_details_title": "Sessie" + }, + "browser_sessions_overview": { + "body:one": "{{count}} actieve sessie", + "body:other": "{{count}} actieve sessies", + "heading": "Browsers", + "no_active_sessions": { + "default": "You are not signed in to any web browsers.", + "inactive_90_days": "All your sessions have been active in the last 90 days." + }, + "view_all_button": "Bekijk alles" + }, + "compat_session_detail": { + "client_details_title": "Client", + "name": "Naam", + "session_details_title": "Sessie" + }, + "device_type_icon_label": { + "desktop": "Desktop", + "mobile": "Mobiel", + "pc": "Computer", + "tablet": "Tablet", + "unknown": "Onbekend apparaattype", + "web": "Web" + }, + "email_in_use": { + "heading": "The email address {{email}} is already in use." + }, + "end_session_button": { + "confirmation_modal_title": "Weet u zeker dat u deze sessie wilt beëindigen?", + "text": "Uitloggen" + }, + "error": { + "hideDetails": "Verberg details", + "showDetails": "Details weergeven", + "subtitle": "Er is een onverwachte fout opgetreden. Probeer het opnieuw.", + "title": "Er is iets misgegaan" + }, + "error_boundary_title": "Er is iets misgegaan", + "errors": { + "field_required": "This field is required", + "rate_limit_exceeded": "You've made too many requests in a short period. Please wait a few minutes and try again." 
+ }, + "last_active": { + "active_date": "Actief {{relativeDate}}", + "active_now": "Nu actief", + "inactive_90_days": "Meer dan 90 dagen inactief" + }, + "nav": { + "devices": "Apparaten", + "plan": "Plan", + "profile": "Profiel", + "sessions": "Sessies", + "settings": "Instellingen" + }, + "not_found_alert_title": "Niet gevonden.", + "not_logged_in_alert": "U bent niet ingelogd.", + "oauth2_client_detail": { + "details_title": "Client", + "id": "Client ID", + "name": "Naam", + "policy": "Beleid", + "terms": "Servicevoorwaarden" + }, + "oauth2_session_detail": { + "client_details_name": "Naam", + "client_title": "Client", + "session_details_title": "Sessie" + }, + "pagination_controls": { + "total": "Totaal: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Huidig wachtwoord", + "failure": { + "description": { + "account_locked": "Your account is locked and can not be recovered at this time. If this is not expected, please contact your server administrator.", + "expired_recovery_ticket": "The recovery link has expired. Please start the account recovery process again from the start.", + "invalid_new_password": "The new password you chose is invalid; it may not meet the configured security policy.", + "no_current_password": "You don't have a current password.", + "no_such_recovery_ticket": "The recovery link is invalid. If you copied the link from the recovery e-mail, please check the full link was copied.", + "password_changes_disabled": "Password changes are disabled.", + "recovery_ticket_already_used": "The recovery link has already been used. It cannot be used again.", + "unspecified": "This might be a temporary problem, so please try again later. If the problem persists, please contact your server administrator.", + "wrong_password": "The password you supplied as your current password is incorrect. Please try again." 
+ }, + "title": "Failed to update password" + }, + "new_password_again_label": "Enter new password again", + "new_password_label": "New password", + "passwords_match": "Wachtwoorden komen overeen!", + "passwords_no_match": "Passwords don't match", + "subtitle": "Choose a new password for your account.", + "success": { + "description": "Your password has been updated successfully.", + "title": "Password updated" + }, + "title": "Change your password" + }, + "password_reset": { + "consumed": { + "subtitle": "To create a new password, start over and select ”Forgot password“.", + "title": "The link to reset your password has already been used" + }, + "expired": { + "resend_email": "Resend email", + "subtitle": "Request a new email that will be sent to: {{email}}", + "title": "The link to reset your password has expired" + }, + "subtitle": "Choose a new password for your account.", + "title": "Reset your password" + }, + "password_strength": { + "placeholder": "Password strength", + "score": { + "0": "Extremely weak password", + "1": "Very weak password", + "2": "Weak password", + "3": "Strong password", + "4": "Very strong password" + }, + "suggestion": { + "all_uppercase": "Capitalise some, but not all letters.", + "another_word": "Add more words that are less common.", + "associated_years": "Avoid years that are associated with you.", + "capitalization": "Capitalise more than the first letter.", + "dates": "Avoid dates and years that are associated with you.", + "l33t": "Avoid predictable letter substitutions like '@' for 'a'.", + "longer_keyboard_pattern": "Use longer keyboard patterns and change typing direction multiple times.", + "no_need": "You can create strong passwords without using symbols, numbers, or uppercase letters.", + "pwned": "If you use this password elsewhere, you should change it.", + "recent_years": "Avoid recent years.", + "repeated": "Avoid repeated words and characters.", + "reverse_words": "Avoid reversed spellings of common words.", + 
"sequences": "Avoid common character sequences.", + "use_words": "Use multiple words, but avoid common phrases." + }, + "too_weak": "This password is too weak", + "warning": { + "common": "This is a commonly used password.", + "common_names": "Common names and surnames are easy to guess.", + "dates": "Dates are easy to guess.", + "extended_repeat": "Repeated character patterns like \"abcabcabc\" are easy to guess.", + "key_pattern": "Short keyboard patterns are easy to guess.", + "names_by_themselves": "Single names or surnames are easy to guess.", + "pwned": "Your password was exposed by a data breach on the Internet.", + "recent_years": "Recent years are easy to guess.", + "sequences": "Common character sequences like \"abc\" are easy to guess.", + "similar_to_common": "This is similar to a commonly used password.", + "simple_repeat": "Repeated characters like \"aaa\" are easy to guess.", + "straight_row": "Straight rows of keys on your keyboard are easy to guess.", + "top_hundred": "This is a frequently used password.", + "top_ten": "This is a heavily used password.", + "user_inputs": "There should not be any personal or page related data.", + "word_by_itself": "Single words are easy to guess." + } + }, + "reset_cross_signing": { + "button": "Identiteit resetten", + "cancelled": { + "description_1": "You can close this window and go back to the app to continue.", + "description_2": "If you're signed out everywhere and don't remember your recovery code, you'll still need to reset your identity.", + "heading": "Identity reset cancelled." 
+ }, + "description": "Als u niet bent aangemeld bij andere apparaten en u bent uw herstelsleutel kwijt, moet u uw identiteit opnieuw instellen om de app te kunnen blijven gebruiken.", + "effect_list": { + "negative_1": "You will lose your existing message history", + "negative_2": "You will need to verify all your existing devices and contacts again", + "neutral_1": "You will lose any message history that's stored only on the server", + "neutral_2": "You will need to verify all your existing devices and contacts again", + "positive_1": "Your account details, contacts, preferences, and chat list will be kept" + }, + "failure": { + "description": "This might be a temporary problem, so please try again later. If the problem persists, please contact your server administrator.", + "heading": "Failed to allow crypto identity reset", + "title": "Kan crypto identiteit niet toestaan" + }, + "finish_reset": "Finish reset", + "heading": "Stel uw identiteit opnieuw in als u op een andere manier niet kunt bevestigen", + "start_reset": "Start reset", + "success": { + "description": "De identiteits reset is goedgekeurd voor de volgende {{minutes}} minuten. U kunt dit venster sluiten en teruggaan naar de app om door te gaan.", + "heading": "Identity reset successfully. Go back to the app to finish the process.", + "title": "Het opnieuw instellen van de crypto identiteit is tijdelijk toegestaan" + }, + "warning": "Only reset your identity if you don't have access to another signed-in device and you've lost your recovery key." 
+ }, + "selectable_session": { + "label": "Sessie selecteren" + }, + "session": { + "client_id_label": "Client ID", + "current": "Huidige", + "current_badge": "Huidige", + "device_id_label": "Apparaat-ID", + "finished_date": "Voltooid op ", + "finished_label": "Voltooid", + "generic_browser_session": "Browser session", + "id_label": "ID", + "ip_label": "IP-adres", + "last_active_label": "Laatst actief", + "last_auth_label": "Laatste authenticatie", + "name_for_platform": "{{name}} voor {{platform}}", + "scopes_label": "Scopes", + "set_device_name": { + "help": "Set a name that will help you identify this device.", + "label": "Device name", + "title": "Edit device name" + }, + "signed_in_date": "Ingelogd op ", + "signed_in_label": "Ingelogd", + "title": "Device details", + "unknown_browser": "Onbekende browser", + "unknown_device": "Onbekend apparaat", + "uri_label": "Uri", + "user_id_label": "Gebruikers-ID", + "username_label": "Gebruikersnaam" + }, + "session_detail": { + "alert": { + "button": "Terug", + "text": "Deze sessie bestaat niet of is niet langer actief.", + "title": "Kan sessie niet vinden: {{deviceId}}" + } + }, + "unknown_route": "Onbekende route {{route}}", + "unverified_email_alert": { + "button": "Controleren en verifiëren", + "text:one": "You have {{count}} unverified email address.", + "text:other": "You have {{count}} unverified email addresses.", + "title": "Niet geverifieerd e-mailadres" + }, + "user_email": { + "cant_delete_primary": "Kies een ander primair e-mailadres om de huidige te verwijderen.", + "delete_button_confirmation_modal": { + "action": "E-mailadres verwijderen", + "body": "Dit e-mailadres verwijderen?", + "incorrect_password": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to delete this email address" + }, + "delete_button_title": "E-mailadres verwijderen", + "email": "E-mailadres", + "make_primary_button": "Instellen als primair", + "not_verified": "Niet geverifieerd", + 
"primary_email": "Primair e-mailadres", + "retry_button": "Code opnieuw verzenden", + "unverified": "Niet geverifieerd" + }, + "user_email_list": { + "heading": "E-mailadressen", + "no_primary_email_alert": "Geen primair e-mailadres" + }, + "user_greeting": { + "error": "Het laden van de gebruiker is mislukt" + }, + "user_name": { + "display_name_field_label": "Weergavenaam" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} actieve sessie", + "active_sessions:other": "{{count}} actieve sessies", + "heading": "Where you're signed in", + "no_active_sessions": { + "default": "You are not signed in to any application.", + "inactive_90_days": "All your sessions have been active in the last 90 days." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "The code has expired. Please request a new code.", + "title": "Code expired" + }, + "code_field_error": "Code not recognised", + "code_field_label": "6-cijferige code", + "code_field_wrong_shape": "Code must be 6 digits", + "email_sent_alert": { + "description": "Enter the new code below.", + "title": "New code sent" + }, + "enter_code_prompt": "Enter the 6-digit code sent to: {{email}}", + "heading": "Verifieer uw e-mailadres", + "invalid_code_alert": { + "description": "Check the code sent to your email and update the fields below to continue.", + "title": "You entered the wrong code" + }, + "resend_code": "Code opnieuw verzenden", + "resend_email": "Resend email", + "sent": "Verzonden!", + "unknown_email": "Unknown email" + } + }, + "mas": { + "scope": { + "edit_profile": "Edit your profile and contact details", + "manage_sessions": "Manage your devices and sessions", + "mas_admin": "Manage users (urn:mas:admin)", + "send_messages": "Send new messages on your behalf", + "synapse_admin": "Administer the server (urn:synapse:admin:*)", + "view_messages": "View your existing messages and data", + "view_profile": "See your profile info and contact details" + } + } +} \ No newline at 
end of file diff --git a/matrix-authentication-service/frontend/locales/pl.json b/matrix-authentication-service/frontend/locales/pl.json new file mode 100644 index 00000000..b2bfb0dc --- /dev/null +++ b/matrix-authentication-service/frontend/locales/pl.json @@ -0,0 +1,404 @@ +{ + "action": { + "back": "Powrót", + "cancel": "Anuluj", + "clear": "Wyczyść", + "close": "Zamknij", + "collapse": "Zwiń", + "confirm": "Potwierdź", + "continue": "Kontynuuj", + "edit": "Edytuj", + "expand": "Rozwiń", + "save": "Zapisz", + "save_and_continue": "Zapisz i kontynuuj", + "sign_out": "Wyloguj", + "start_over": "Zacznij od nowa" + }, + "branding": { + "privacy_policy": { + "alt": "Link do polityki prywatności serwisu", + "link": "Polityka prywatności" + }, + "terms_and_conditions": { + "alt": "Link do warunków korzystania z usługi", + "link": "Warunki korzystania" + } + }, + "common": { + "add": "Dodaj", + "e2ee": "Szyfrowanie typu end-to-end", + "error": "Błąd", + "loading": "Wczytywanie…", + "next": "Dalej", + "password": "Hasło", + "previous": "Poprzedni", + "saved": "Zapisano", + "saving": "Zapisywanie…" + }, + "frontend": { + "account": { + "account_password": "Hasło do konta", + "contact_info": "Dane kontaktowe", + "delete_account": { + "alert_description": "To konto zostanie trwale usunięte i nie będziesz już mieć dostępu do żadnych wiadomości.", + "alert_title": "Za chwilę stracisz wszystkie swoje dane", + "button": "Usuń konto", + "dialog_description": "Potwierdź, że chcesz usunąć swoje konto:\n\n\nNie będziesz mógł ponownie aktywować swojego konta\nNie będziesz już mógł się zalogować\n Nikt nie będzie mógł ponownie wykorzystać Twojej nazwy użytkownika (MXID), łącznie z Tobą\nOpuścisz wszystkie pokoje i wiadomości bezpośrednie, w których uczestniczysz\nZostaniesz usunięty z serwera tożsamości i nikt nie będzie mógł Cię znaleźć po Twoim adresie e-mail ani numerze telefonu\n\nTwoje stare wiadomości nadal będą widoczne dla osób, które je otrzymały. 
Czy chcesz ukryć wysłane wiadomości przed osobami, które dołączą do pokoi w przyszłości?", + "dialog_title": "Usunąć to konto?", + "erase_checkbox_label": "Tak, ukryj wszystkie moje wiadomości przed nowymi użytkownikami", + "incorrect_password": "Niepoprawne hasło. Spróbuj ponownie.", + "mxid_label": "Potwierdź swój identyfikator Matrix ({{ mxid }})", + "mxid_mismatch": "Ta wartość nie pasuje do Twojego identyfikatora Matrix", + "password_label": "Wprowadź hasło, aby kontynuować" + }, + "edit_profile": { + "display_name_help": "To właśnie zobaczą inni użytkownicy, niezależnie od tego, gdzie jesteś zalogowany.", + "display_name_label": "Nazwa wyświetlana", + "title": "Edytuj profil", + "username_label": "Nazwa użytkownika" + }, + "password": { + "change": "Zmień hasło", + "change_disabled": "Zmiany hasła są wyłączone przez administratora.", + "label": "Hasło" + }, + "sign_out": { + "button": "Wyloguj się z konta", + "dialog": "Wylogować się z tego konta?" + }, + "title": "Twoje konto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Wprowadzony adres e-mail nie jest dozwolony przez politykę serwera.", + "title": "E-mail odrzucony zgodnie z polityką" + }, + "email_denied_error": "Podany adres e-mail nie jest dozwolony przez politykę serwera", + "email_exists_alert": { + "text": "Podany adres e-mail został już dodany do tego konta", + "title": "Adres e-mail już istnieje" + }, + "email_exists_error": "Podany adres e-mail został już dodany do tego konta", + "email_field_help": "Dodaj alternatywny adres e-mail, którego możesz używać do uzyskania dostępu do tego konta.", + "email_field_label": "Dodaj adres e-mail", + "email_in_use_error": "Podany adres e-mail jest już używany", + "email_invalid_alert": { + "text": "Podany adres e-mail jest nieprawidłowy", + "title": "Nieprawidłowy adres e-mail" + }, + "email_invalid_error": "Podany adres e-mail jest nieprawidłowy", + "incorrect_password_error": "Niepoprawne hasło. 
Spróbuj ponownie.", + "password_confirmation": "Potwierdź hasło do swojego konta, aby dodać ten adres e-mail" + }, + "app_sessions_list": { + "error": "Nie udało się załadować sesji aplikacji", + "heading": "Aplikacje" + }, + "browser_session_details": { + "current_badge": "Aktualny", + "session_details_title": "Sesja" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktywna sesja", + "body:few": "{{count}} aktywne sesje", + "body:many": "{{count}} aktywnych sesji", + "heading": "Przeglądarki", + "no_active_sessions": { + "default": "Nie jesteś zalogowany w żadnej przeglądarce internetowej.", + "inactive_90_days": "Wszystkie Twoje sesje były aktywne w ciągu ostatnich 90 dni." + }, + "view_all_button": "Zobacz wszystko" + }, + "compat_session_detail": { + "client_details_title": "Informacje o kliencie", + "name": "Nazwa", + "session_details_title": "Sesja" + }, + "device_type_icon_label": { + "desktop": "Pulpit", + "mobile": "Telefon", + "pc": "Komputer", + "tablet": "Tablet", + "unknown": "Nieznany typ urządzenia", + "web": "Przeglądarka" + }, + "email_in_use": { + "heading": "Adres e-mail {{email}} jest już używany." + }, + "end_session_button": { + "confirmation_modal_title": "Czy na pewno chcesz zakończyć tę sesję?", + "text": "Usuń urządzenie" + }, + "error": { + "hideDetails": "Ukryj szczegóły", + "showDetails": "Pokaż szczegóły", + "subtitle": "Wystąpił nieoczekiwany błąd. Spróbuj ponownie.", + "title": "Coś poszło nie tak" + }, + "error_boundary_title": "Coś poszło nie tak", + "errors": { + "field_required": "To pole jest wymagane", + "rate_limit_exceeded": "W krótkim czasie wysłałeś zbyt wiele żądań. Poczekaj kilka minut i spróbuj ponownie." 
+ }, + "last_active": { + "active_date": "Aktywne {{relativeDate}}", + "active_now": "Aktywne teraz", + "inactive_90_days": "Nieaktywny przez ponad 90 dni" + }, + "nav": { + "devices": "Urządzenia", + "plan": "Plan", + "profile": "Profil", + "sessions": "Sesje", + "settings": "Ustawienia" + }, + "not_found_alert_title": "Nie znaleziono.", + "not_logged_in_alert": "Nie jesteś zalogowany.", + "oauth2_client_detail": { + "details_title": "Informacje o kliencie", + "id": "Identyfikator klienta", + "name": "Nazwa", + "policy": "Polityka", + "terms": "Warunki korzystania z usługi" + }, + "oauth2_session_detail": { + "client_details_name": "Nazwa", + "client_title": "Informacje o kliencie", + "session_details_title": "Sesja" + }, + "pagination_controls": { + "total": "Razem: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Aktualne hasło", + "failure": { + "description": { + "account_locked": "Twoje konto jest zablokowane i nie można go obecnie odzyskać. Jeśli nie jest to oczekiwane, skontaktuj się z administratorem serwera.", + "expired_recovery_ticket": "Link do odzyskiwania wygasł. Rozpocznij proces odzyskiwania konta od początku.", + "invalid_new_password": "Wybrane nowe hasło jest nieprawidłowe i może nie spełniać skonfigurowanych zasad bezpieczeństwa.", + "no_current_password": "Nie masz aktualnego hasła.", + "no_such_recovery_ticket": "Link odzyskiwania jest nieprawidłowy. Jeśli skopiowałeś link z e-maila odzyskiwania, sprawdź, czy został skopiowany w całości.", + "password_changes_disabled": "Możliwość zmiany hasła jest wyłączona.", + "recovery_ticket_already_used": "Link odzyskiwania został już użyty. Nie można go użyć ponownie.", + "unspecified": "To może być problem tymczasowy, więc spróbuj ponownie później. Jeśli problem będzie się powtarzał, skontaktuj się z administratorem serwera.", + "wrong_password": "Podane hasło jest nieprawidłowe. Spróbuj ponownie." 
+ }, + "title": "Nie udało się zaktualizować hasła" + }, + "new_password_again_label": "Wprowadź ponownie nowe hasło", + "new_password_label": "Nowe hasło", + "passwords_match": "Hasła są zgodne!", + "passwords_no_match": "Hasła nie pasują", + "subtitle": "Wybierz nowe hasło do swojego konta.", + "success": { + "description": "Twoje hasło zostało pomyślnie zaktualizowane.", + "title": "Zaktualizowano hasło" + }, + "title": "Zmień swoje hasło" + }, + "password_reset": { + "consumed": { + "subtitle": "Aby utworzyć nowe hasło, zacznij od nowa i wybierz „Zapomniałem hasła”.", + "title": "Link do zresetowania hasła został już użyty" + }, + "expired": { + "resend_email": "Wyślij ponownie wiadomość e-mail", + "subtitle": "Poproś o nową wiadomość e-mail, która zostanie wysłana na adres: {{email}}", + "title": "Link do resetowania hasła wygasł" + }, + "subtitle": "Wybierz nowe hasło do swojego konta.", + "title": "Zresetuj swoje hasło" + }, + "password_strength": { + "placeholder": "Siła hasła", + "score": { + "0": "Bardzo słabe hasło", + "1": "Bardzo słabe hasło", + "2": "Słabe hasło", + "3": "Silne hasło", + "4": "Bardzo silne hasło" + }, + "suggestion": { + "all_uppercase": "Pisz wielką literą niektóre, ale nie wszystkie litery.", + "another_word": "Dodaj więcej słów, które są mniej popularne.", + "associated_years": "Unikaj lat, które kojarzą się z Tobą.", + "capitalization": "Wielką literą pisz więcej niż tylko pierwszą literę.", + "dates": "Unikaj dat i lat, które są związane z Tobą.", + "l33t": "Unikaj przewidywalnych zamian liter, takich jak „@” zamiast „a”.", + "longer_keyboard_pattern": "Używaj dłuższych sekwencji klawiszy i wielokrotnie zmieniaj kierunek pisania.", + "no_need": "Możesz tworzyć silne hasła bez używania symboli, cyfr i wielkich liter.", + "pwned": "Jeżeli używasz tego hasła gdzie indziej, powinieneś je zmienić.", + "recent_years": "Unikaj ostatnich lat.", + "repeated": "Unikaj powtarzających się słów i znaków.", + "reverse_words": "Unikaj 
odwróconych pisowni popularnych słów.", + "sequences": "Unikaj popularnych sekwencji znaków.", + "use_words": "Używaj wielu słów, ale unikaj popularnych zwrotów." + }, + "too_weak": "To hasło jest zbyt słabe", + "warning": { + "common": "To jest powszechnie używane hasło.", + "common_names": "Popularne imiona i nazwiska są łatwe do odgadnięcia.", + "dates": "Daty są łatwe do odgadnięcia.", + "extended_repeat": "Powtarzające się wzory znaków, takie jak „abcabcabc”, są łatwe do odgadnięcia.", + "key_pattern": "Krótkie wzory klawiszowe są łatwe do odgadnięcia", + "names_by_themselves": "Pojedyncze imiona lub nazwiska są łatwe do odgadnięcia.", + "pwned": "Twoje hasło zostało ujawnione w wyniku naruszenia bezpieczeństwa danych w Internecie.", + "recent_years": "Ostatnie lata są łatwe do odgadnięcia.", + "sequences": "Typowe sekwencje znaków, takie jak „abc”, są łatwe do odgadnięcia.", + "similar_to_common": "Jest to podobne do powszechnie używanego hasła.", + "simple_repeat": "Powtarzające się znaki, takie jak „aaa”, są łatwe do odgadnięcia.", + "straight_row": "Proste rzędy klawiszy na klawiaturze są łatwe do odgadnięcia.", + "top_hundred": "To jest często używane hasło.", + "top_ten": "To jest bardzo często używane hasło.", + "user_inputs": "Nie powinno być żadnych danych osobowych ani związanych ze stroną.", + "word_by_itself": "Pojedyncze słowa są łatwe do odgadnięcia." + } + }, + "reset_cross_signing": { + "button": "Zresetuj tożsamość", + "cancelled": { + "description_1": "Możesz zamknąć to okno i wrócić do aplikacji, aby kontynuować.", + "description_2": "Jeśli wylogowałeś się z dowolnego miejsca i nie pamiętasz kodu odzyskiwania, nadal musisz zresetować swoją tożsamość.", + "heading": "Resetowanie tożsamości zostało anulowane." 
+ }, + "description": "Jeśli nie zalogowałeś się na żadnym innym urządzeniu i utraciłeś klucz odzyskiwania, musisz zresetować swoją tożsamość, aby móc nadal korzystać z aplikacji.", + "effect_list": { + "negative_1": "Utracisz istniejącą historię wiadomości", + "negative_2": "Będziesz musiał ponownie zweryfikować wszystkie swoje istniejące urządzenia i kontakty", + "neutral_1": "Utracisz całą historię wiadomości przechowywaną wyłącznie na serwerze", + "neutral_2": "Będziesz musiał ponownie zweryfikować wszystkie swoje istniejące urządzenia i kontakty", + "positive_1": "Twoje dane konta, kontakty, preferencje i lista czatów zostaną zachowane" + }, + "failure": { + "description": "To może być problem tymczasowy, więc spróbuj ponownie później. Jeśli problem będzie się powtarzał, skontaktuj się z administratorem serwera.", + "heading": "Nie udało się zezwolić na zresetowanie tożsamości kryptograficznej", + "title": "Nie udało się zezwolić na tożsamość kryptograficzną" + }, + "finish_reset": "Zakończ resetowanie", + "heading": "Zresetuj swoją tożsamość, jeśli nie możesz potwierdzić jej w inny sposób", + "start_reset": "Rozpocznij resetowanie", + "success": { + "description": "Resetowanie tożsamości zostało zatwierdzone na najbliższe {{minutes}} minut. Możesz zamknąć to okno i wrócić do aplikacji, aby kontynuować.", + "heading": "Tożsamość została zresetowana pomyślnie. Wróć do aplikacji, aby dokończyć proces.", + "title": "Resetowanie tożsamości kryptograficznej tymczasowo dozwolone" + }, + "warning": "Zresetuj swoją tożsamość tylko wtedy, gdy nie masz dostępu do innego urządzenia, na którym jesteś zalogowany i utraciłeś klucz odzyskiwania." 
+ }, + "selectable_session": { + "label": "Wybierz sesję" + }, + "session": { + "client_id_label": "Identyfikator klienta", + "current": "Aktualny", + "current_badge": "Aktualny", + "device_id_label": "Identyfikator urządzenia", + "finished_date": "Zakończone ", + "finished_label": "Zakończone", + "generic_browser_session": "Sesja przeglądarki", + "id_label": "ID", + "ip_label": "Adres IP", + "last_active_label": "Ostatnio aktywny", + "last_auth_label": "Ostatnie uwierzytelnienie", + "name_for_platform": "{{name}} dla {{platform}}", + "scopes_label": "Zakresy", + "set_device_name": { + "help": "Ustaw nazwę, która ułatwi identyfikację tego urządzenia.", + "label": "Nazwa urządzenia", + "title": "Edytuj nazwę urządzenia" + }, + "signed_in_date": "Zalogowano ", + "signed_in_label": "Zalogowano", + "title": "Szczegóły urządzenia", + "unknown_browser": "Nieznana przeglądarka", + "unknown_device": "Nieznane urządzenie", + "uri_label": "Uri", + "user_id_label": "Identyfikator użytkownika", + "username_label": "Nazwa użytkownika" + }, + "session_detail": { + "alert": { + "button": "Wróć", + "text": "Ta sesja nie istnieje lub nie jest już aktywna.", + "title": "Nie można znaleźć sesji: {{deviceId}}" + } + }, + "unknown_route": "Nieznana trasa {{route}}", + "unverified_email_alert": { + "button": "Przejrzyj i zweryfikuj", + "text:one": "Masz {{count}} niezweryfikowany adres e-mail.", + "text:few": "Masz {{count}} niezweryfikowane adresy e-mail.", + "text:many": "Masz {{count}} niezweryfikowanych adresów e-mail.", + "title": "Niezweryfikowany adres e-mail" + }, + "user_email": { + "cant_delete_primary": "Aby usunąć ten adres, wybierz inny główny adres e-mail.", + "delete_button_confirmation_modal": { + "action": "Usuń adres e-mail", + "body": "Usunąć ten adres e-mail?", + "incorrect_password": "Niepoprawne hasło. 
Spróbuj ponownie.", + "password_confirmation": "Potwierdź hasło do swojego konta, aby usunąć ten adres e-mail" + }, + "delete_button_title": "Usuń adres e-mail", + "email": "E-mail", + "make_primary_button": "Ustaw jako główne", + "not_verified": "Nie zweryfikowano", + "primary_email": "Główny adres e-mail", + "retry_button": "Wyślij ponownie kod", + "unverified": "Niezweryfikowany" + }, + "user_email_list": { + "heading": "E-maile", + "no_primary_email_alert": "Brak głównego adresu e-mail" + }, + "user_greeting": { + "error": "Nie udało się załadować użytkownika" + }, + "user_name": { + "display_name_field_label": "Nazwa wyświetlana" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktywna sesja", + "active_sessions:few": "{{count}} aktywne sesje", + "active_sessions:many": "{{count}} aktywnych sesji", + "heading": "Gdzie jesteś zalogowany", + "no_active_sessions": { + "default": "Nie jesteś zalogowany w żadnej aplikacji.", + "inactive_90_days": "Wszystkie Twoje sesje były aktywne w ciągu ostatnich 90 dni." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Kod stracił ważność. 
Poproś o nowy kod.", + "title": "Kod wygasł" + }, + "code_field_error": "Kod nie został rozpoznany", + "code_field_label": "6-cyfrowy kod", + "code_field_wrong_shape": "Kod musi składać się z 6 cyfr", + "email_sent_alert": { + "description": "Wprowadź nowy kod poniżej.", + "title": "Wysłano nowy kod" + }, + "enter_code_prompt": "Wprowadź 6-cyfrowy kod wysłany na adres: {{email}}", + "heading": "Zweryfikuj swój adres e-mail", + "invalid_code_alert": { + "description": "Sprawdź kod wysłany na Twój adres e-mail i zaktualizuj poniższe pola, aby kontynuować.", + "title": "Wpisałeś zły kod" + }, + "resend_code": "Wyślij ponownie kod", + "resend_email": "Wyślij ponownie wiadomość e-mail", + "sent": "Wysłano!", + "unknown_email": "Nieznany adres e-mail" + } + }, + "mas": { + "scope": { + "edit_profile": "Edytuj swój profil i dane kontaktowe", + "manage_sessions": "Zarządzaj swoimi urządzeniami i sesjami", + "mas_admin": "Zarządzaj użytkownikami (urn:mas:admin)", + "send_messages": "Wysyłaj nowe wiadomości w Twoim imieniu", + "synapse_admin": "Administrowanie serwerem (urn:synapse:admin:*)", + "view_messages": "Przegląd istniejących wiadomości i danych", + "view_profile": "Przegląd informacji o profilu i danych kontaktowych" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/pt.json b/matrix-authentication-service/frontend/locales/pt.json new file mode 100644 index 00000000..5503dd64 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/pt.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Voltar", + "cancel": "Cancelar", + "clear": "Limpar", + "close": "Fechar", + "collapse": "Fechar", + "confirm": "Confirmar", + "continue": "Continuar", + "edit": "Editar", + "expand": "Expandir", + "save": "Guardar", + "save_and_continue": "Guardar e Continuar", + "sign_out": "Terminar sessão", + "start_over": "Recomeçar" + }, + "branding": { + "privacy_policy": { + "alt": "Link para a política de privacidade do serviço", + 
"link": "Política de privacidade" + }, + "terms_and_conditions": { + "alt": "Link para os termos e condições do serviço", + "link": "Termos e Condições" + } + }, + "common": { + "add": "Adicionar", + "e2ee": "Encriptação de ponta a ponta", + "error": "Erro", + "loading": "A carregar...", + "next": "Seguinte", + "password": "Palavra-passe", + "previous": "Anterior", + "saved": "Guardado", + "saving": "A guardar…" + }, + "frontend": { + "account": { + "account_password": "Palavra-passe da conta", + "contact_info": "Informações de contacto", + "delete_account": { + "alert_description": "Esta conta será permanentemente eliminada e deixará de ter acesso a todas as suas mensagens.", + "alert_title": "Está prestes a perder todos os seus dados", + "button": "Eliminar conta", + "dialog_description": "Confirme que pretende eliminar a conta:\n\n\nNão será possível reativar a conta\nDeixará de poder iniciar sessão\nNinguém poderá reutilizar o seu nome de utilizador (MXID), incluindo o próprio\nDeixará todas as salas e mensagens diretas em que participa\nSerá removido do servidor de identidade e ninguém o poderá encontrar com o seu e-mail ou número de telefone\n\nAs suas mensagens antigas continuarão visíveis para as pessoas que as receberam. 
Gostaria de ocultar as suas mensagens enviadas a pessoas que entrem em salas no futuro?", + "dialog_title": "Eliminar esta conta?", + "erase_checkbox_label": "Sim, ocultar todas as minhas mensagens de novos membros", + "incorrect_password": "Palavra-passe incorreta, tente novamente", + "mxid_label": "Confirme o seu ID do Matrix ({{ mxid }})", + "mxid_mismatch": "Este valor não corresponde ao seu ID do Matrix", + "password_label": "Introduza a sua palavra-passe para continuar" + }, + "edit_profile": { + "display_name_help": "Isto é o que os outros verão sempre que tiver sessão iniciada.", + "display_name_label": "Nome de exibição", + "title": "Editar perfil", + "username_label": "Nome de utilizador" + }, + "password": { + "change": "Alterar palavra-passe", + "change_disabled": "As alterações de palavra-passe são desativadas pelo administrador.", + "label": "Palavra-passe" + }, + "sign_out": { + "button": "Terminar sessão na conta", + "dialog": "Terminar sessão nesta conta?" + }, + "title": "A sua conta" + }, + "add_email_form": { + "email_denied_alert": { + "text": "O e-mail inserido não é permitido pela política do servidor.", + "title": "E-mail negado pela política" + }, + "email_denied_error": "O e-mail inserido não é permitido pela política do servidor", + "email_exists_alert": { + "text": "O e-mail introduzido já foi adicionado a esta conta", + "title": "O e-mail já existe" + }, + "email_exists_error": "O e-mail introduzido já foi adicionado a esta conta", + "email_field_help": "Adicione um e-mail alternativo que possa utilizar para aceder a esta conta.", + "email_field_label": "Adicionar e-mail", + "email_in_use_error": "O e-mail introduzido já está a ser utilizado", + "email_invalid_alert": { + "text": "O e-mail introduzido é inválido", + "title": "E-mail inválido" + }, + "email_invalid_error": "O e-mail introduzido é inválido", + "incorrect_password_error": "Palavra-passe incorreta, tente novamente", + "password_confirmation": "Confirme a palavra-passe
da sua conta para adicionar este endereço de e-mail" + }, + "app_sessions_list": { + "error": "Falha ao carregar sessões da aplicação", + "heading": "Aplicações" + }, + "browser_session_details": { + "current_badge": "Atual", + "session_details_title": "Sessão" + }, + "browser_sessions_overview": { + "body:one": "{{count}} sessão ativa", + "body:other": "{{count}} sessões ativas", + "heading": "Navegadores", + "no_active_sessions": { + "default": "Não tem sessão iniciada em nenhum navegador Web.", + "inactive_90_days": "Todas as suas sessões estiveram ativas nos últimos 90 dias." + }, + "view_all_button": "Ver tudo" + }, + "compat_session_detail": { + "client_details_title": "Informação do cliente", + "name": "Nome", + "session_details_title": "Sessão" + }, + "device_type_icon_label": { + "desktop": "Ambiente de trabalho", + "mobile": "Telemóvel", + "pc": "Computador", + "tablet": "Tablet", + "unknown": "Tipo de dispositivo desconhecido", + "web": "Web" + }, + "email_in_use": { + "heading": "O endereço de e-mail {{email}} já está a ser utilizado." + }, + "end_session_button": { + "confirmation_modal_title": "Tem a certeza de que quer terminar esta sessão?", + "text": "Remover dispositivo" + }, + "error": { + "hideDetails": "Ocultar detalhes", + "showDetails": "Mostrar detalhes", + "subtitle": "Ocorreu um erro inesperado. Por favor, tente novamente.", + "title": "Algo correu mal" + }, + "error_boundary_title": "Algo correu mal", + "errors": { + "field_required": "Este campo é obrigatório", + "rate_limit_exceeded": "Efetuou demasiadas solicitações num curto espaço de tempo. Aguarde alguns minutos e tente novamente." 
+ }, + "last_active": { + "active_date": "Ativo {{relativeDate}}", + "active_now": "Ativo agora", + "inactive_90_days": "Inativo por 90+ dias" + }, + "nav": { + "devices": "Dispositivos", + "plan": "Plano", + "profile": "Perfil", + "sessions": "Sessões", + "settings": "Configurações" + }, + "not_found_alert_title": "Não encontrado.", + "not_logged_in_alert": "Não tem sessão iniciada.", + "oauth2_client_detail": { + "details_title": "Informação do cliente", + "id": "ID do Cliente", + "name": "Nome", + "policy": "Política", + "terms": "Termos de serviço" + }, + "oauth2_session_detail": { + "client_details_name": "Nome", + "client_title": "Informação do cliente", + "session_details_title": "Sessão" + }, + "pagination_controls": { + "total": "Total: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Palavra-passe atual", + "failure": { + "description": { + "account_locked": "A sua conta está bloqueada e não pode ser recuperada neste momento. Se isso não for esperado, entre em contacto com o administrador do servidor.", + "expired_recovery_ticket": "O link de recuperação expirou. Inicie o processo de recuperação da conta desde o início.", + "invalid_new_password": "A nova senha que você escolheu é inválida; ela pode não atender à política de segurança configurada.", + "no_current_password": "Você não tem uma senha atual.", + "no_such_recovery_ticket": "O link de recuperação é inválido. Se copiou o link a partir do e-mail de recuperação, verifique se o copiou na totalidade.", + "password_changes_disabled": "As alterações de palavra-passe estão desactivadas.", + "recovery_ticket_already_used": "O link de recuperação já foi utilizado e não pode ser utilizado novamente.", + "unspecified": "Este pode ser um problema temporário, por isso, tente novamente mais tarde. Se o problema persistir, entre em contato com o administrador do servidor.", + "wrong_password": "A palavra-passe que forneceu como palavra-passe atual está incorreta. 
Por favor, tente novamente." + }, + "title": "Falha na actualização da palavra-passe" + }, + "new_password_again_label": "Introduza novamente a nova palavra-passe", + "new_password_label": "Nova palavra-passe", + "passwords_match": "As palavras-passe coincidem!", + "passwords_no_match": "As palavras-passe não coincidem", + "subtitle": "Escolha uma nova palavra-passe para a sua conta.", + "success": { + "description": "A sua palavra-passe foi actualizada com sucesso.", + "title": "Palavra-passe Atualizada" + }, + "title": "Alterar a sua palavra-passe" + }, + "password_reset": { + "consumed": { + "subtitle": "Para criar uma nova palavra-passe, comece de novo e selecione \"Esqueci-me da palavra-passe\".", + "title": "O link para redefinir sua senha já foi usado" + }, + "expired": { + "resend_email": "Reenviar e-mail", + "subtitle": "Solicitar um novo e-mail que será enviado para: {{email}}", + "title": "A ligação para redefinir a sua palavra-passe expirou" + }, + "subtitle": "Escolha uma nova palavra-passe para a sua conta.", + "title": "Repor a sua palavra-passe" + }, + "password_strength": { + "placeholder": "Força da palavra-passe", + "score": { + "0": "Palavra-passe extremamente fraca", + "1": "Palavra-passe muito fraca", + "2": "Palavra-passe fraca", + "3": "Palavra-passe forte", + "4": "Palavra-passe muito forte" + }, + "suggestion": { + "all_uppercase": "Colocar algumas letras em maiúsculas, mas não todas.", + "another_word": "Adicione mais palavras que são menos comuns.", + "associated_years": "Evite anos que lhe estão associados.", + "capitalization": "Colocar mais maiúsculas do que a primeira letra.", + "dates": "Evite datas e anos associados a si.", + "l33t": "Evite substituições de letras previsíveis como '@' por 'a'.", + "longer_keyboard_pattern": "Use padrões de teclado mais longos e altere a direção da digitação várias vezes.", + "no_need": "Você pode criar senhas fortes sem usar símbolos, números ou letras maiúsculas.", + "pwned": "Se utilizar esta 
palavra-passe noutro local, deve alterá-la.", + "recent_years": "Evite os últimos anos.", + "repeated": "Evite palavras e caracteres repetidos.", + "reverse_words": "Evite grafias invertidas de palavras comuns.", + "sequences": "Evite sequências de caracteres comuns.", + "use_words": "Use várias palavras, mas evite frases comuns." + }, + "too_weak": "Esta palavra-passe é demasiado fraca", + "warning": { + "common": "Esta é uma palavra-passe frequentemente utilizada.", + "common_names": "Nomes e sobrenomes comuns são fáceis de adivinhar.", + "dates": "As datas são fáceis de adivinhar.", + "extended_repeat": "Padrões repetidos de carateres, como 'abcabcabc', são fáceis de adivinhar.", + "key_pattern": "Padrões de teclado curtos são fáceis de adivinhar.", + "names_by_themselves": "Nomes individuais ou sobrenomes são fáceis de adivinhar.", + "pwned": "A sua palavra-passe foi exposta por uma violação de dados na Internet.", + "recent_years": "Os últimos anos são fáceis de adivinhar.", + "sequences": "Sequências de caracteres comuns como \"abc\" são fáceis de adivinhar.", + "similar_to_common": "Isso é semelhante a uma senha comumente usada.", + "simple_repeat": "Caracteres repetidos como \"aaa\" são fáceis de adivinhar.", + "straight_row": "Linhas retas de teclas no teclado são fáceis de adivinhar.", + "top_hundred": "Esta é uma palavra-passe frequentemente utilizada.", + "top_ten": "Esta é uma palavra-passe muito utilizada.", + "user_inputs": "Não deve haver quaisquer dados pessoais ou relacionados com a página.", + "word_by_itself": "Palavras isoladas são fáceis de adivinhar." + } + }, + "reset_cross_signing": { + "button": "Redefinir identidade", + "cancelled": { + "description_1": "Pode fechar esta janela e voltar à aplicação para continuar.", + "description_2": "Caso tenha terminado sessão em todos os dispositivos e não se recorde do seu código de recuperação, continuará a ser necessário repor a sua identidade.", + "heading": "Redefinição de identidade cancelada." 
+ }, + "description": "Se não tiver sessão iniciada noutros dispositivos e tiver perdido a sua chave de recuperação, terá de repor a sua identidade para continuar a utilizar a aplicação.", + "effect_list": { + "negative_1": "Perderá o histórico de mensagens existente", + "negative_2": "Terá de verificar novamente todos os seus dispositivos e contactos existentes.", + "neutral_1": "Perderá qualquer histórico de mensagens que esteja armazenado apenas no servidor", + "neutral_2": "Terá de verificar novamente todos os seus dispositivos e contactos existentes.", + "positive_1": "Os detalhes da sua conta, contactos, preferências e lista de conversação serão mantidos" + }, + "failure": { + "description": "Este pode ser um problema temporário, por isso, tente novamente mais tarde. Se o problema persistir, entre em contato com o administrador do servidor.", + "heading": "Falha ao permitir a reposição da identidade criptográfica", + "title": "Falha ao permitir identidade de criptografia" + }, + "finish_reset": "Concluir reposição", + "heading": "Redefina sua identidade caso não possa confirmar de outra forma", + "start_reset": "Iniciar reposição", + "success": { + "description": "A redefinição de identidade foi aprovada para os próximos {{minutes}} minutos. Pode fechar esta janela e voltar à aplicação para continuar.", + "heading": "Redefinição de identidade com êxito. Volte ao aplicativo para concluir o processo.", + "title": "Redefinição de identidade criptográfica permitida temporariamente" + }, + "warning": "Só reponha a sua identidade se não tiver acesso a outro dispositivo com sessão iniciada e tiver perdido a sua chave de recuperação." 
+ }, + "selectable_session": { + "label": "Selecionar sessão" + }, + "session": { + "client_id_label": "ID do Cliente", + "current": "Atual", + "current_badge": "Atual", + "device_id_label": "ID de dispositivo", + "finished_date": "Concluído ", + "finished_label": "Concluído", + "generic_browser_session": "Sessão do navegador", + "id_label": "Identificação", + "ip_label": "Endereço IP", + "last_active_label": "Ativo pela última vez", + "last_auth_label": "Última autenticação", + "name_for_platform": "{{name}} para {{platform}}", + "scopes_label": "Âmbitos de aplicação", + "set_device_name": { + "help": "Defina um nome que o ajude a identificar este dispositivo.", + "label": "Nome do dispositivo", + "title": "Editar nome do dispositivo" + }, + "signed_in_date": "Sessão iniciada ", + "signed_in_label": "Sessão iniciada", + "title": "Detalhes do dispositivo", + "unknown_browser": "Navegador desconhecido", + "unknown_device": "Dispositivo desconhecido", + "uri_label": "Uri", + "user_id_label": "ID de utilizador", + "username_label": "Nome do utilizador" + }, + "session_detail": { + "alert": { + "button": "Volta atrás", + "text": "Esta sessão não existe ou não está mais ativa.", + "title": "Não é possível encontrar sessão: {{deviceId}}" + } + }, + "unknown_route": "Rota desconhecida {{route}}", + "unverified_email_alert": { + "button": "Rever e verificar", + "text:one": "Você tem {{count}} endereço de e-mail não verificado.", + "text:other": "Você tem {{count}} endereços de e-mail não verificados.", + "title": "E-mail não verificado" + }, + "user_email": { + "cant_delete_primary": "Escolha um e-mail principal diferente para excluir este.", + "delete_button_confirmation_modal": { + "action": "Excluir e-mail", + "body": "Excluir este e-mail?", + "incorrect_password": "Palavra-passe incorreta, tente novamente", + "password_confirmation": "Confirme a palavra-passe da sua conta para eliminar este endereço de correio eletrónico" + }, + "delete_button_title": "Remover endereço
de e-mail", + "email": "Email", + "make_primary_button": "Tornar primário", + "not_verified": "Não verificado", + "primary_email": "E-mail primário", + "retry_button": "Reenviar código", + "unverified": "Não verificado" + }, + "user_email_list": { + "heading": "E-mails", + "no_primary_email_alert": "Sem endereço de e-mail principal" + }, + "user_greeting": { + "error": "Falha ao carregar o usuário" + }, + "user_name": { + "display_name_field_label": "Nome de exibição" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} sessão ativa", + "active_sessions:other": "{{count}} sessões ativas", + "heading": "Onde tem sessão iniciada", + "no_active_sessions": { + "default": "Não tem sessão iniciada em nenhuma aplicação.", + "inactive_90_days": "Todas as suas sessões estiveram ativas nos últimos 90 dias." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "O código expirou. Solicite um novo código.", + "title": "Código expirado" + }, + "code_field_error": "Código não reconhecido", + "code_field_label": "Código de 6 dígitos", + "code_field_wrong_shape": "O código deve ter 6 dígitos", + "email_sent_alert": { + "description": "Insira o novo código abaixo.", + "title": "Novo código enviado" + }, + "enter_code_prompt": "Insira o código de 6 dígitos enviado para: {{email}}", + "heading": "Verifique o seu e-mail", + "invalid_code_alert": { + "description": "Verifique o código enviado para o seu e-mail e atualize os campos abaixo para continuar.", + "title": "Introduziu o código errado" + }, + "resend_code": "Reenviar código", + "resend_email": "Reenviar e-mail", + "sent": "Enviado!", + "unknown_email": "E-mail desconhecido" + } + }, + "mas": { + "scope": { + "edit_profile": "Editar o seu perfil e detalhes de contacto", + "manage_sessions": "Gerir os seus dispositivos e sessões", + "mas_admin": "Administrar qualquer usuário no matrix-authentication-service", + "send_messages": "Envie novas mensagens em seu nome", + "synapse_admin": 
"Administrar o servidor Synapse", + "view_messages": "Ver as mensagens e os dados existentes", + "view_profile": "Ver as informações do seu perfil e detalhes de contacto" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/ru.json b/matrix-authentication-service/frontend/locales/ru.json new file mode 100644 index 00000000..914621be --- /dev/null +++ b/matrix-authentication-service/frontend/locales/ru.json @@ -0,0 +1,404 @@ +{ + "action": { + "back": "Назад", + "cancel": "Отмена", + "clear": "Очистить", + "close": "Закрыть", + "collapse": "Свернуть", + "confirm": "Подтвердить", + "continue": "Продолжить", + "edit": "Редактировать", + "expand": "Развернуть", + "save": "Сохранить", + "save_and_continue": "Сохранить и продолжить", + "sign_out": "Выйти", + "start_over": "Начать сначала" + }, + "branding": { + "privacy_policy": { + "alt": "Ссылка на политику конфиденциальности сервиса", + "link": "Политика конфиденциальности" + }, + "terms_and_conditions": { + "alt": "Ссылка на условия предоставления услуг", + "link": "Правила и условия" + } + }, + "common": { + "add": "Добавить", + "e2ee": "Сквозное шифрование", + "error": "Ошибка", + "loading": "Загрузка…", + "next": "Далее", + "password": "Пароль", + "previous": "Предыдущий", + "saved": "Сохранено", + "saving": "Сохранение…" + }, + "frontend": { + "account": { + "account_password": "Пароль учётной записи", + "contact_info": "Контактная информация", + "delete_account": { + "alert_description": "Эта учетная запись будет навсегда удалена, и вы потеряете доступ к своим сообщениям.", + "alert_title": "Вы потеряете все свои данные", + "button": "Удалить аккаунт", + "dialog_description": "Подтвердите, что вы хотите удалить свою учетную запись:\n\n\nВы не сможете восстановить свою учетную запись\nВы больше не сможете войти\nНикто не сможет повторно использовать ваше имя пользователя (MXID), включая вас\nВы покинете все комнаты и личные чаты, в которых вы находитесь\nВы будете
удалены с сервера идентификации, и никто не сможет найти вас по вашей электронной почте или номеру телефона\n\nВаши старые сообщения по-прежнему будут видны людям, которые их получили. Хотите скрыть отправленные вами сообщения от людей, которые присоединятся к комнатам в будущем?", + "dialog_title": "Удалить этот аккаунт?", + "erase_checkbox_label": "Да, скрыть все мои сообщения от новых участников", + "incorrect_password": "Неверный пароль, попробуйте еще раз", + "mxid_label": "Введите свой идентификатор Matrix ({{ mxid }} )", + "mxid_mismatch": "Это значение не соответствует вашему идентификатору Matrix.", + "password_label": "Введите пароль, чтобы продолжить" + }, + "edit_profile": { + "display_name_help": "Это то, что другие пользователи увидят, когда вы войдёте.", + "display_name_label": "Отображаемое имя", + "title": "Редактировать профиль", + "username_label": "Псевдоним" + }, + "password": { + "change": "Изменить пароль", + "change_disabled": "Смена пароля отключена администратором.", + "label": "Пароль" + }, + "sign_out": { + "button": "Выйти из учётной записи", + "dialog": "Выйти из этой учётной записи?" 
+ }, + "title": "Ваша учетная запись" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Введенный адрес электронной почты не разрешен политикой сервера.", + "title": "Электронная почта запрещена политикой" + }, + "email_denied_error": "Введенный адрес электронной почты запрещён политикой сервера", + "email_exists_alert": { + "text": "Введенный адрес электронной почты уже добавлен в эту учётную запись", + "title": "Электронная почта уже существует" + }, + "email_exists_error": "Введенный адрес электронной почты уже добавлен в эту учётную запись", + "email_field_help": "Добавьте ещё один адрес электронной почты, который можно будет использовать для входа в эту учетную запись.", + "email_field_label": "Добавить электронную почту", + "email_in_use_error": "Введенный адрес электронной почты уже используется", + "email_invalid_alert": { + "text": "Введенный адрес электронной почты недействителен", + "title": "Недействительный адрес электронной почты" + }, + "email_invalid_error": "Введенный адрес электронной почты недействителен", + "incorrect_password_error": "Неверный пароль, попробуйте еще раз", + "password_confirmation": "Введите пароль учетной записи, чтобы добавить этот адрес электронной почты" + }, + "app_sessions_list": { + "error": "Не удалось загрузить сеансы приложений", + "heading": "Приложения" + }, + "browser_session_details": { + "current_badge": "Текущее", + "session_details_title": "Сессия" + }, + "browser_sessions_overview": { + "body:one": "{{count}} активная сессия", + "body:few": "{{count}} активные сессии", + "body:many": "{{count}} активных сессий", + "heading": "Браузеры", + "no_active_sessions": { + "default": "Вы не вошли ни через один веб-браузер.", + "inactive_90_days": "Все ваши сессии были активны в течение последних 90 дней."
+ }, + "view_all_button": "Смотреть все" + }, + "compat_session_detail": { + "client_details_title": "Информация о клиенте", + "name": "Название", + "session_details_title": "Сессия" + }, + "device_type_icon_label": { + "desktop": "Компьютер", + "mobile": "Переносное устройство", + "pc": "Компьютер", + "tablet": "Планшет", + "unknown": "Неизвестный тип устройства", + "web": "Веб" + }, + "email_in_use": { + "heading": "Адрес электронной почты {{email}} уже используется." + }, + "end_session_button": { + "confirmation_modal_title": "Вы уверены, что хотите завершить этот сеанс?", + "text": "Удалить устройство" + }, + "error": { + "hideDetails": "Скрыть подробности", + "showDetails": "Показать подробности", + "subtitle": "Произошла непредвиденная ошибка. Пожалуйста, попробуйте еще раз.", + "title": "Что-то пошло не так" + }, + "error_boundary_title": "Что-то пошло не так", + "errors": { + "field_required": "Это поле обязательно для заполнения", + "rate_limit_exceeded": "Вы делаете запросы слишком часто. Пожалуйста, подождите несколько минут и повторите попытку." 
+ }, + "last_active": { + "active_date": "Активен {{relativeDate}}", + "active_now": "Активен сейчас", + "inactive_90_days": "Неактивен более 90 дней" + }, + "nav": { + "devices": "Устройства", + "plan": "Тарифный план", + "profile": "Профиль", + "sessions": "Сессии", + "settings": "Настройки" + }, + "not_found_alert_title": "Не найдено.", + "not_logged_in_alert": "Вы не вошли.", + "oauth2_client_detail": { + "details_title": "Информация о клиенте", + "id": "ID клиента", + "name": "Название", + "policy": "Политика", + "terms": "Условия обслуживания" + }, + "oauth2_session_detail": { + "client_details_name": "Название", + "client_title": "Информация о клиенте", + "session_details_title": "Сессия" + }, + "pagination_controls": { + "total": "Всего: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Текущий пароль", + "failure": { + "description": { + "account_locked": "Ваша учетная запись заблокирована и в данный момент не может быть восстановлена. Если вы считаете что это ошибка, обратитесь к администратору вашего сервера.", + "expired_recovery_ticket": "Срок действия ссылки на восстановление истек. Пожалуйста, начните процесс восстановления учётной записи с самого начала.", + "invalid_new_password": "Выбранный вами новый пароль не подходит. Возможно он не соответствует установленной политике безопасности.", + "no_current_password": "У вас нет текущего пароля.", + "no_such_recovery_ticket": "Ссылка на восстановление недействительна. Если вы скопировали ссылку из письма для восстановления, проверьте, что она была скопирована полностью.", + "password_changes_disabled": "Изменение пароля отключено.", + "recovery_ticket_already_used": "Ссылка на восстановление уже использовалась. Её нельзя использовать снова.", + "unspecified": "Возможно, это временная проблема, поэтому повторите попытку позже. Если проблема сохраняется, обратитесь к администратору сервера.", + "wrong_password": "Пароль, который вы указали в качестве текущего пароля, неверен. 
Пожалуйста, попробуйте еще раз." + }, + "title": "Не удалось обновить пароль" + }, + "new_password_again_label": "Введите новый пароль еще раз", + "new_password_label": "Новый пароль", + "passwords_match": "Пароли совпадают!", + "passwords_no_match": "Пароли не совпадают", + "subtitle": "Выберите новый пароль для вашей учётной записи.", + "success": { + "description": "Ваш пароль успешно изменён.", + "title": "Пароль изменён" + }, + "title": "Изменить пароль" + }, + "password_reset": { + "consumed": { + "subtitle": "Чтобы создать новый пароль, начните сначала и выберите \"Забыли пароль\".", + "title": "Ссылка для сброса пароля уже использовалась" + }, + "expired": { + "resend_email": "Отправить письмо повторно", + "subtitle": "Запросите новое электронное письмо, которое будет отправлено на: {{email}}", + "title": "Срок действия ссылки для сброса пароля истек" + }, + "subtitle": "Выберите новый пароль для вашей учётной записи.", + "title": "Сброс пароля" + }, + "password_strength": { + "placeholder": "Надёжность пароля", + "score": { + "0": "Чрезвычайно слабый пароль", + "1": "Очень слабый пароль", + "2": "Слабый пароль", + "3": "Надёжный пароль", + "4": "Очень надёжный пароль" + }, + "suggestion": { + "all_uppercase": "Пишите некоторые буквы заглавными", + "another_word": "Добавьте больше слов, которые встречаются редко.", + "associated_years": "Избегайте дат, которые ассоциируются с вами.", + "capitalization": "Делайте заглавными не только первые буквы.", + "dates": "Избегайте дат, которые ассоциируются с вами.", + "l33t": "Избегайте предсказуемых замен букв, например, \"@\" на \"a\".", + "longer_keyboard_pattern": "Используйте более длинные комбинации клавиш и несколько раз меняйте направление ввода.", + "no_need": "Вы можете создавать надежные пароли без использования символов, цифр и заглавных букв.", + "pwned": "Если вы используете этот пароль в других местах, то вам следует его изменить.", + "recent_years": "Избегайте последних лет.", + "repeated": 
"Избегайте повторяющихся слов и символов.", + "reverse_words": "Избегайте обратного написания распространенных слов.", + "sequences": "Избегайте распространенных последовательностей символов.", + "use_words": "Используйте много слов, но избегайте обычных фраз." + }, + "too_weak": "Этот пароль слишком слабый", + "warning": { + "common": "Это часто используемый пароль.", + "common_names": "Распространенные имена и фамилии легко угадать.", + "dates": "Даты легко угадать.", + "extended_repeat": "Повторяющиеся символы, например \"abcabcabc\", легко угадать.", + "key_pattern": "Короткие комбинации клавиш легко угадать.", + "names_by_themselves": "Одиночные имена или фамилии легко угадать.", + "pwned": "Ваш пароль был раскрыт в результате утечки данных в Интернете.", + "recent_years": "Последние годы легко угадать.", + "sequences": "Обычные последовательности символов, такие как \"abc\", легко угадать.", + "similar_to_common": "Это похоже на часто используемый пароль.", + "simple_repeat": "Повторяющиеся символы, такие как \"aaa\", легко угадать.", + "straight_row": "Прямые ряды клавиш на клавиатуре легко угадать.", + "top_hundred": "Это часто используемый пароль.", + "top_ten": "Это часто используемый пароль.", + "user_inputs": "Не должно быть никаких личных данных или данных, связанных со страницами.", + "word_by_itself": "Отдельные слова легко угадать." + } + }, + "reset_cross_signing": { + "button": "Сбросить идентификацию", + "cancelled": { + "description_1": "Вы можете закрыть это окно и вернуться в приложение, чтобы продолжить.", + "description_2": "Если вы вышли из системы везде и не помните код восстановления, вам всё равно потребуется сбросить идентификацию.", + "heading": "Сброс идентификации отменен." 
+ }, + "description": "Если у вас нет устройства, на котором выполнен вход, и вы потеряли ключ восстановления, то для продолжения работы с приложением вам придется сбросить ключ шифрования.", + "effect_list": { + "negative_1": "Вы потеряете существующую историю сообщений", + "negative_2": "Вам нужно будет заново проверить все существующие устройства и контакты.", + "neutral_1": "Вы потеряете всю историю сообщений, которая хранится только на сервере", + "neutral_2": "Вам нужно будет заново проверить все существующие устройства и контакты.", + "positive_1": "Данные вашего аккаунта, контакты, предпочтения и список чатов будут сохранены" + }, + "failure": { + "description": "Возможно, это временная проблема, поэтому повторите попытку позже. Если проблема сохраняется, обратитесь к администратору сервера.", + "heading": "Не удалось разрешить сброс криптоидентификатора", + "title": "Не удалось разрешить криптоидентификацию" + }, + "finish_reset": "Завершить сброс", + "heading": "Сбросьте идентификацию, если вы не можете подтвердить свою личность другим путём", + "start_reset": "Начать сброс", + "success": { + "description": "Сброс идентификации разрешён на {{minutes}} минут. Вы можете закрыть это окно и вернуться в приложение, чтобы продолжить.", + "heading": "Идентификация успешно сброшена. Вернитесь в приложение, чтобы завершить процесс.", + "title": "Сброс криптоидентификатора временно разрешен" + }, + "warning": "Сбрасывайте ключ шифрования только в том случае, если у вас нет доступа к другому устройству, на котором выполнен вход, и вы потеряли ключ восстановления." 
+ }, + "selectable_session": { + "label": "Выбрать сессию" + }, + "session": { + "client_id_label": "ID клиента", + "current": "Текущее", + "current_badge": "Текущее", + "device_id_label": "Идентификатор устройства", + "finished_date": "Завершена ", + "finished_label": "Завершено", + "generic_browser_session": "Сессия браузера", + "id_label": "Идентификатор", + "ip_label": "IP-адрес", + "last_active_label": "Последняя активность", + "last_auth_label": "Последняя аутентификация", + "name_for_platform": "{{name}} для {{platform}}", + "scopes_label": "Области", + "set_device_name": { + "help": "Установите имя, которое поможет вам идентифицировать это устройство.", + "label": "Имя устройства", + "title": "Переименовать устройство" + }, + "signed_in_date": "Вошёл ", + "signed_in_label": "Вошёл в систему", + "title": "Сведения об устройстве", + "unknown_browser": "Неизвестный браузер", + "unknown_device": "Неизвестное устройство", + "uri_label": "URI", + "user_id_label": "ID пользователя", + "username_label": "Имя пользователя" + }, + "session_detail": { + "alert": { + "button": "Назад", + "text": "Эта сессия не существует или больше не активна.", + "title": "Не удалось найти сеанс: {{deviceId}}" + } + }, + "unknown_route": "Неизвестный маршрут {{route}}", + "unverified_email_alert": { + "button": "Просмотрите и подтвердите", + "text:one": "У вас есть {{count}} неподтверждённый адрес электронной почты.", + "text:few": "У вас есть {{count}} неподтверждённых адреса электронной почты.", + "text:many": "У вас есть {{count}} неподтверждённых адресов электронной почты.", + "title": "Неподтвержденный адрес электронной почты" + }, + "user_email": { + "cant_delete_primary": "Сделайте другой адрес электронной почты основным, чтобы удалить этот.", + "delete_button_confirmation_modal": { + "action": "Удалить электронную почту", + "body": "Удалить эту электронную почту?", + "incorrect_password": "Неверный пароль, попробуйте еще раз", + "password_confirmation": "Введите пароль учетной 
записи, чтобы удалить этот адрес электронной почты" + }, + "delete_button_title": "Удалить адрес электронной почты", + "email": "Электронная почта", + "make_primary_button": "Сделать основным", + "not_verified": "Не подтверждён", + "primary_email": "Основной адрес электронной почты", + "retry_button": "Отправить код повторно", + "unverified": "Не подтвержден" + }, + "user_email_list": { + "heading": "Электронные почты", + "no_primary_email_alert": "Нет основного адреса электронной почты" + }, + "user_greeting": { + "error": "Не удалось загрузить пользователя" + }, + "user_name": { + "display_name_field_label": "Отображаемое имя" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} активная сессия", + "active_sessions:few": "{{count}} активных сессий", + "active_sessions:many": "{{count}} активных сессий", + "heading": "Где выполнен вход", + "no_active_sessions": { + "default": "Вы не вошли ни через одно приложение.", + "inactive_90_days": "Все ваши сессии активные в течение последних 90 дней." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Срок действия кода истек. 
Пожалуйста, запросите новый код.", + "title": "Срок действия кода истек" + }, + "code_field_error": "Код не распознан", + "code_field_label": "шестизначный код", + "code_field_wrong_shape": "Код должен состоять из 6 цифр", + "email_sent_alert": { + "description": "Введите новый код ниже.", + "title": "Отправлен новый код" + }, + "enter_code_prompt": "Введите 6-значный код, отправленный на: {{email}}", + "heading": "Подтвердите электронную почту", + "invalid_code_alert": { + "description": "Проверьте код, отправленный на почту, и введите его в поле ниже, чтобы продолжить.", + "title": "Вы ввели неправильный код" + }, + "resend_code": "Отправить код повторно", + "resend_email": "Отправить письмо повторно", + "sent": "Отправлено!", + "unknown_email": "Неизвестная электронная почта" + } + }, + "mas": { + "scope": { + "edit_profile": "Редактировать профиль и контактную информацию", + "manage_sessions": "Управлять устройствами и сеансами", + "mas_admin": "Управление пользователями (urn:mas:admin)", + "send_messages": "Отправлять новые сообщения от вашего имени", + "synapse_admin": "Администрирование сервера (urn:synapse:admin:*)", + "view_messages": "Просматривать существующие сообщения и данные", + "view_profile": "Просматривать информацию о вашем профиле и контактных данных" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/sv.json b/matrix-authentication-service/frontend/locales/sv.json new file mode 100644 index 00000000..f14d4e02 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/sv.json @@ -0,0 +1,401 @@ +{ + "action": { + "back": "Tillbaka", + "cancel": "Avbryt", + "clear": "Rensa", + "close": "Stäng", + "collapse": "Kollapsa", + "confirm": "Confirm", + "continue": "Fortsätt", + "edit": "Redigera", + "expand": "Expandera", + "save": "Spara", + "save_and_continue": "Spara och fortsätt", + "sign_out": "Logga ut", + "start_over": "Börja om" + }, + "branding": { + "privacy_policy": { + "alt": "Länk till 
tjänstens sekretesspolicy", + "link": "Sekretesspolicy" + }, + "terms_and_conditions": { + "alt": "Länk till användarvillkoren för tjänsten", + "link": "Allmänna villkor" + } + }, + "common": { + "add": "Lägg till", + "e2ee": "End-to-end-kryptering", + "error": "Fel", + "loading": "Laddar …", + "next": "Nästa", + "password": "Lösenord", + "previous": "Föregående", + "saved": "Sparat", + "saving": "Sparar..." + }, + "frontend": { + "account": { + "account_password": "Kontolösenord", + "contact_info": "Kontaktuppgifter", + "delete_account": { + "alert_description": "This account will be permanently erased and you’ll no longer have access to any of your messages.", + "alert_title": "You’re about to lose all of your data", + "button": "Delete account", + "dialog_description": "Confirm that you would like to delete your account:\n\n\nYou will not be able to reactivate your account\nYou will no longer be able to sign in\nNo one will be able to reuse your username (MXID), including you\nYou will leave all rooms and direct messages you are in\nYou will be removed from the identity server, and no one will be able to find you with your email or phone number\n\nYour old messages will still be visible to people who received them. 
Would you like to hide your sent messages from people who join rooms in the future?", + "dialog_title": "Delete this account?", + "erase_checkbox_label": "Yes, hide all my messages from new joiners", + "incorrect_password": "Incorrect password, please try again", + "mxid_label": "Confirm your Matrix ID ({{ mxid }})", + "mxid_mismatch": "This value does not match your Matrix ID", + "password_label": "Enter your password to continue" + }, + "edit_profile": { + "display_name_help": "Detta är vad andra kommer att se var du än är inloggad.", + "display_name_label": "Visningsnamn", + "title": "Redigera profil", + "username_label": "Användarnamn" + }, + "password": { + "change": "Ändra lösenord", + "change_disabled": "Lösenordsändringar är inaktiverad av administratören.", + "label": "Lösenord" + }, + "sign_out": { + "button": "Sign out of account", + "dialog": "Sign out of this account?" + }, + "title": "Ditt konto" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Det angivna e-postmeddelandet är inte tillåtet enligt serverpolicyn.", + "title": "E-post nekas av policy" + }, + "email_denied_error": "Den angivna e-posten tillåts inte av serverpolicyn", + "email_exists_alert": { + "text": "Det angivna e-postmeddelandet har redan lagts till i det här kontot", + "title": "E-postadressen finns redan" + }, + "email_exists_error": "Det angivna e-postmeddelandet har redan lagts till i det här kontot", + "email_field_help": "Lägg till en alternativ e-postadress som du kan använda för att komma åt det här kontot.", + "email_field_label": "Lägg till e-post", + "email_in_use_error": "Det angivna e-postmeddelandet används redan", + "email_invalid_alert": { + "text": "Den angivna e-postadressen är ogiltig", + "title": "Ogiltig e-postadress" + }, + "email_invalid_error": "Den angivna e-postadressen är ogiltig", + "incorrect_password_error": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to add this email address" + }, + 
"app_sessions_list": { + "error": "Det gick inte att läsa in appsessioner", + "heading": "Appar" + }, + "browser_session_details": { + "current_badge": "Nuvarande", + "session_details_title": "Session" + }, + "browser_sessions_overview": { + "body:one": "{{count}} aktiv session", + "body:other": "{{count}} aktiva sessioner", + "heading": "Webbläsare", + "no_active_sessions": { + "default": "Du är inte inloggad i någon webbläsare.", + "inactive_90_days": "Alla dina sessioner har varit aktiva under de senaste 90 dagarna." 
+ }, + "last_active": { + "active_date": "Aktiv {{relativeDate}}", + "active_now": "Aktiv nu", + "inactive_90_days": "Inaktiv i 90+ dagar" + }, + "nav": { + "devices": "Enheter", + "plan": "Plan", + "profile": "Profil", + "sessions": "Sessioner", + "settings": "Inställningar" + }, + "not_found_alert_title": "Hittades inte.", + "not_logged_in_alert": "Du är inte inloggad.", + "oauth2_client_detail": { + "details_title": "Kundinformation", + "id": "Kund-ID", + "name": "Namn", + "policy": "Policy", + "terms": "Terms Of Service" + }, + "oauth2_session_detail": { + "client_details_name": "Namn", + "client_title": "Kundinformation", + "session_details_title": "Session" + }, + "pagination_controls": { + "total": "Totalt: {{totalCount}}" + }, + "password_change": { + "current_password_label": "Nuvarande lösenord", + "failure": { + "description": { + "account_locked": "Ditt konto är låst och kan inte återställas just nu. Om du inte förväntat dig detta, kontakta din serveradministratör.", + "expired_recovery_ticket": "Återställningslänken har upphört att gälla. Vänligen starta kontoåterställningsprocessen igen från början.", + "invalid_new_password": "Det nya lösenordet du valde är ogiltigt; det kanske inte uppfyller den konfigurerade säkerhetspolicyn.", + "no_current_password": "Du har inget aktuellt lösenord.", + "no_such_recovery_ticket": "Återställningslänken är ogiltig. Om du kopierade länken från återställnings-e-postmeddelandet, kontrollera att hela länken kopierades.", + "password_changes_disabled": "Lösenordsändringar är inaktiverade.", + "recovery_ticket_already_used": "Återställningslänken har redan använts. Den kan inte användas igen.", + "unspecified": "Detta kan vara ett tillfälligt problem, så försök igen senare. Om problemet kvarstår kontaktar du serveradministratören.", + "wrong_password": "Lösenordet du angav som ditt nuvarande lösenord är felaktigt. Vänligen försök igen." 
+ }, + "title": "Det gick inte att uppdatera lösenordet" + }, + "new_password_again_label": "Ange det nya lösenordet igen", + "new_password_label": "Nytt lösenord", + "passwords_match": "Lösenorden matchar!", + "passwords_no_match": "Lösenorden matchar inte", + "subtitle": "Välj ett nytt lösenord för ditt konto.", + "success": { + "description": "Ditt lösenord har uppdaterats framgångsrikt.", + "title": "Lösenord uppdaterat" + }, + "title": "Ändra ditt lösenord" + }, + "password_reset": { + "consumed": { + "subtitle": "För att skapa ett nytt lösenord, börja om och välj ”Glömt lösenord”.", + "title": "Länken för att återställa ditt lösenord har redan använts" + }, + "expired": { + "resend_email": "Skicka e-post på nytt", + "subtitle": "Begär ett nytt e-postmeddelande som skickas till: {{email}}", + "title": "Länken för att återställa ditt lösenord har upphört att gälla" + }, + "subtitle": "Välj ett nytt lösenord för ditt konto.", + "title": "Återställ ditt lösenord" + }, + "password_strength": { + "placeholder": "Lösenordets styrka", + "score": { + "0": "Extremt svagt lösenord", + "1": "Mycket svagt lösenord", + "2": "Svagt lösenord", + "3": "Starkt lösenord", + "4": "Mycket starkt lösenord" + }, + "suggestion": { + "all_uppercase": "Skriv vissa bokstäver med stor bokstav, men inte alla.", + "another_word": "Lägg till fler ord som är mindre vanliga.", + "associated_years": "Undvik år som är förknippade med dig.", + "capitalization": "Använd stor bokstav på fler än den första bokstaven.", + "dates": "Undvik datum och årtal som är förknippade med dig.", + "l33t": "Undvik förutsägbara bokstavsersättningar som '@' för 'a'.", + "longer_keyboard_pattern": "Använd längre tangentbordsmönster och ändra skrivriktning flera gånger.", + "no_need": "Du kan skapa starka lösenord utan att använda symboler, siffror eller versaler.", + "pwned": "Om du använder det här lösenordet någon annanstans bör du ändra det.", + "recent_years": "Undvik de senaste åren.", + "repeated": "Undvik 
upprepade ord och tecken.", + "reverse_words": "Undvik omvänd stavning av vanliga ord.", + "sequences": "Undvik vanliga teckensekvenser.", + "use_words": "Använd flera ord, men undvik vanliga fraser." + }, + "too_weak": "Detta lösenord är för svagt", + "warning": { + "common": "Detta är ett vanligt lösenord.", + "common_names": "Vanliga namn och efternamn är lätta att gissa.", + "dates": "Datum är lätta att gissa.", + "extended_repeat": "Upprepade teckenmönster som \"abcabcabc\" är lätta att gissa.", + "key_pattern": "Korta tangentbordsmönster är lätta att gissa.", + "names_by_themselves": "Enstaka namn eller efternamn är lätta att gissa.", + "pwned": "Ditt lösenord har röjts genom ett dataintrång på Internet.", + "recent_years": "De senaste åren är lätta att gissa.", + "sequences": "Vanliga karaktärssekvenser som ”abc” är lätta att gissa.", + "similar_to_common": "Detta liknar ett vanligt använt lösenord.", + "simple_repeat": "Upprepade tecken som ”aaa” är lätta att gissa.", + "straight_row": "Raka rader med tangenter på tangentbordet är lätta att gissa.", + "top_hundred": "Detta är ett lösenord som används ofta.", + "top_ten": "Detta är ett flitigt använt lösenord.", + "user_inputs": "Det ska inte finnas några personliga eller sidrelaterade uppgifter.", + "word_by_itself": "Enstaka ord är lätta att gissa." + } + }, + "reset_cross_signing": { + "button": "Återställ identitet", + "cancelled": { + "description_1": "Du kan stänga det här fönstret och gå tillbaka till appen för att fortsätta.", + "description_2": "Om du är utloggad överallt och inte kommer ihåg din återställningskod måste du fortfarande återställa din identitet.", + "heading": "Identitetsåterställningen har avbrutits." 
+ }, + "description": "Om du inte är inloggad på någon annan enhet och du har tappat bort din återställningsnyckel måste du återställa din identitet för att fortsätta använda appen.", + "effect_list": { + "negative_1": "Du kommer att förlora din befintliga meddelandehistorik", + "negative_2": "Du kommer att behöva verifiera alla dina befintliga enheter och kontakter igen", + "neutral_1": "Du kommer att förlora all meddelandehistorik som bara är lagrad på servern", + "neutral_2": "Du kommer att behöva verifiera alla dina befintliga enheter och kontakter igen", + "positive_1": "Dina kontouppgifter, kontakter, preferenser och chattlista kommer att behållas" + }, + "failure": { + "description": "Detta kan vara ett tillfälligt problem, så försök igen senare. Om problemet kvarstår kontaktar du serveradministratören.", + "heading": "Det gick inte att tillåta återställning av kryptoidentitet", + "title": "Misslyckades med att tillåta kryptoidentitet" + }, + "finish_reset": "Slutför återställningen", + "heading": "Återställ din identitet om du inte kan bekräfta på annat sätt", + "start_reset": "Starta återställning", + "success": { + "description": "Identitetsåterställningen har godkänts för nästa {{minutes}} minuter. Du kan stänga det här fönstret och gå tillbaka till appen för att fortsätta.", + "heading": "Identiteten har återställts. Gå tillbaka till appen för att avsluta processen.", + "title": "Återställning av kryptoidentitet är tillfälligt tillåten" + }, + "warning": "Återställ bara din identitet om du inte har tillgång till en annan inloggad enhet och du har tappat bort din återställningsnyckel." 
+ }, + "selectable_session": { + "label": "Välj session" + }, + "session": { + "client_id_label": "Kund-ID", + "current": "Nuvarande", + "current_badge": "Nuvarande", + "device_id_label": "Enhets-ID", + "finished_date": "Slutförd ", + "finished_label": "Slutförda", + "generic_browser_session": "Browser session", + "id_label": "ID", + "ip_label": "IP-adress", + "last_active_label": "Senast aktiv", + "last_auth_label": "Senaste autentisering", + "name_for_platform": "{{name}} för {{platform}}", + "scopes_label": "Scope", + "set_device_name": { + "help": "Set a name that will help you identify this device.", + "label": "Device name", + "title": "Edit device name" + }, + "signed_in_date": "Inloggad ", + "signed_in_label": "Inloggad", + "title": "Enhetsdetaljer", + "unknown_browser": "Okänd webbläsare", + "unknown_device": "Okänd enhet", + "uri_label": "Uri", + "user_id_label": "Användar-ID", + "username_label": "Användarnamn" + }, + "session_detail": { + "alert": { + "button": "Gå tillbaka", + "text": "Den här sessionen existerar inte eller är inte längre aktiv.", + "title": "Kan inte hitta sessionen: {{deviceId}}" + } + }, + "unknown_route": "Okänd rutt {{route}}", + "unverified_email_alert": { + "button": "Granska och verifiera", + "text:one": "Du har {{count}} obekräftad e-postadress.", + "text:other": "Du har {{count}} obekräftade e-postadresser.", + "title": "Overifierad e-postadress" + }, + "user_email": { + "cant_delete_primary": "Välj en annan primär e-postadress för att radera denna.", + "delete_button_confirmation_modal": { + "action": "Ta bort e-post", + "body": "Radera det här e-postmeddelandet?", + "incorrect_password": "Incorrect password, please try again", + "password_confirmation": "Confirm your account password to delete this email address" + }, + "delete_button_title": "Ta bort e-postadress", + "email": "E-post", + "make_primary_button": "Gör primär", + "not_verified": "Ej verifierad", + "primary_email": "Primär e-post", + "retry_button": "Skicka kod 
igen", + "unverified": "Overifierad" + }, + "user_email_list": { + "heading": "E-postmeddelanden", + "no_primary_email_alert": "Ingen primär e-postadress" + }, + "user_greeting": { + "error": "Det gick inte att läsa in användaren" + }, + "user_name": { + "display_name_field_label": "Visningsnamn" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} aktiv session", + "active_sessions:other": "{{count}} aktiva sessioner", + "heading": "Var du är inloggad", + "no_active_sessions": { + "default": "Du är inte inloggad på någon applikation.", + "inactive_90_days": "Alla dina sessioner har varit aktiva under de senaste 90 dagarna." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Koden har upphört att gälla. Begär en ny kod.", + "title": "Koden har löpt ut" + }, + "code_field_error": "Koden känns inte igen", + "code_field_label": "6-siffrig kod", + "code_field_wrong_shape": "Koden måste vara 6 siffror", + "email_sent_alert": { + "description": "Ange den nya koden nedan.", + "title": "Ny kod har skickats." 
+ }, + "enter_code_prompt": "Ange den 6-siffriga koden som skickas till: {{email}}", + "heading": "Verifiera din e-postadress", + "invalid_code_alert": { + "description": "Kontrollera koden som skickats till din e-post och uppdatera fälten nedan för att fortsätta.", + "title": "Du har angett fel kod" + }, + "resend_code": "Skicka kod igen", + "resend_email": "Skicka e-post på nytt", + "sent": "Skickat!", + "unknown_email": "Okänd e-postadress" + } + }, + "mas": { + "scope": { + "edit_profile": "Ändra din profil och kontaktuppgifter", + "manage_sessions": "Hantera dina enheter och sessioner", + "mas_admin": "Administrera valfri användare på matrix-authentication-service", + "send_messages": "Skicka nya meddelanden för din räkning", + "synapse_admin": "Administrera Synapse-hemservern", + "view_messages": "Visa dina befintliga meddelanden och data", + "view_profile": "Visa din profilinformation och kontaktuppgifter" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/uk.json b/matrix-authentication-service/frontend/locales/uk.json new file mode 100644 index 00000000..9b8d60e7 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/uk.json @@ -0,0 +1,404 @@ +{ + "action": { + "back": "Назад", + "cancel": "Скасувати", + "clear": "Очистити", + "close": "Закрити", + "collapse": "Згорнути", + "confirm": "Підтвердити", + "continue": "Продовжити", + "edit": "Редагувати", + "expand": "Розгорнути", + "save": "Зберегти", + "save_and_continue": "Зберегти і продовжити", + "sign_out": "Вийти", + "start_over": "Розпочати знову" + }, + "branding": { + "privacy_policy": { + "alt": "Посилання на політику приватності служби", + "link": "Політика приватності" + }, + "terms_and_conditions": { + "alt": "Посилання на умови надання послуг", + "link": "Умови та положення" + } + }, + "common": { + "add": "Додати", + "e2ee": "Наскрізне шифрування", + "error": "Помилка", + "loading": "Завантаження…", + "next": "Далі", + "password": 
"Пароль", + "previous": "Назад", + "saved": "Збережено", + "saving": "Збереження..." + }, + "frontend": { + "account": { + "account_password": "Пароль облікового запису", + "contact_info": "Контактна інформація", + "delete_account": { + "alert_description": "Цей обліковий запис буде стерто назавжди, і ви більше не матимете доступу до всіх своїх повідомлень.", + "alert_title": "Ви можете втратити всі свої дані", + "button": "Видалити обліковий запис", + "dialog_description": "Підтвердьте, що хочете видалити свій обліковий запис:\n\n\nВи не зможете повторно активувати свій обліковий запис\nВи більше не зможете ввійти\nНіхто не зможе повторно використовувати ваше ім'я користувача (MXID), включно з вами\nВи вийдете з усіх кімнат та особистих розмов, в яких ви перебуваєте\nВас буде вилучено з сервера ідентифікації, і ніхто не зможе знайти вас за вашою електронною поштою або номером телефону\n\nВаші старі повідомлення все одно будуть видимі людям, які їх отримали. Чи хотіли б ви сховати свої надіслані повідомлення від людей, які приєднаються до кімнат у майбутньому?", + "dialog_title": "Видалити цей обліковий запис?", + "erase_checkbox_label": "Так, сховати всі мої повідомлення від нових учасників", + "incorrect_password": "Неправильний пароль. Повторіть спробу.", + "mxid_label": "Підтвердьте свій Matrix ID ({{ mxid }})", + "mxid_mismatch": "Це значення не збігається з вашим Matrix ID", + "password_label": "Введіть пароль, щоб продовжити" + }, + "edit_profile": { + "display_name_help": "Це те, що бачитимуть інші, коли ви авторизовані.", + "display_name_label": "Псевдонім", + "title": "Редагувати профіль", + "username_label": "Ім'я користувача" + }, + "password": { + "change": "Змінити пароль", + "change_disabled": "Зміна пароля заборонена адміністратором.", + "label": "Пароль" + }, + "sign_out": { + "button": "Вийти з облікового запису", + "dialog": "Вийти з цього облікового запису?" 
+ }, + "title": "Ваш обліковий запис" + }, + "add_email_form": { + "email_denied_alert": { + "text": "Введена електронна адреса заборонена політикою сервера.", + "title": "Електронна пошта відхилена політикою" + }, + "email_denied_error": "Введена електронна адреса заборонена політикою сервера", + "email_exists_alert": { + "text": "Введена електронна пошта вже додана до цього облікового запису", + "title": "Електронна адреса вже існує" + }, + "email_exists_error": "Введена електронна пошта вже додана до цього облікового запису", + "email_field_help": "Додайте альтернативну електронну пошту, яку ви можете використовувати для доступу до цього облікового запису.", + "email_field_label": "Додати електронну пошту", + "email_in_use_error": "Введена електронна адреса вже використовується", + "email_invalid_alert": { + "text": "Введена електронна пошта недійсна", + "title": "Недійсна адреса електронної пошти" + }, + "email_invalid_error": "Введена електронна пошта недійсна", + "incorrect_password_error": "Неправильний пароль. Повторіть спробу.", + "password_confirmation": "Підтвердьте пароль свого облікового запису, щоб додати цю адресу електронної пошти" + }, + "app_sessions_list": { + "error": "Не вдалося завантажити сеанси застосунку", + "heading": "Застосунки" + }, + "browser_session_details": { + "current_badge": "Поточний", + "session_details_title": "Сеанс" + }, + "browser_sessions_overview": { + "body:one": "{{count}} активний сеанс", + "body:few": "{{count}} активні сеанси", + "body:many": "{{count}} активних сеансів", + "heading": "Браузери", + "no_active_sessions": { + "default": "Ви не ввійшли у жодному браузері.", + "inactive_90_days": "Усі ваші сеанси були активними протягом останніх 90 днів." 
+ },
+ "view_all_button": "Переглянути все"
+ },
+ "compat_session_detail": {
+ "client_details_title": "Інформація про клієнт",
+ "name": "Ім'я",
+ "session_details_title": "Сеанс"
+ },
+ "device_type_icon_label": {
+ "desktop": "Комп’ютер",
+ "mobile": "Мобільний",
+ "pc": "Комп'ютер",
+ "tablet": "Планшет",
+ "unknown": "Невідомий тип пристрою",
+ "web": "Браузер"
+ },
+ "email_in_use": {
+ "heading": "Адреса електронної пошти {{email}} вже використовується."
+ },
+ "end_session_button": {
+ "confirmation_modal_title": "Ви впевнені, що хочете завершити цей сеанс?",
+ "text": "Вилучити пристрій"
+ },
+ "error": {
+ "hideDetails": "Сховати подробиці",
+ "showDetails": "Показати подробиці",
+ "subtitle": "Сталася неочікувана помилка. Повторіть спробу.",
+ "title": "Щось пішло не так"
+ },
+ "error_boundary_title": "Щось пішло не так",
+ "errors": {
+ "field_required": "Це поле обов'язкове.",
+ "rate_limit_exceeded": "Ви зробили забагато запитів за короткий проміжок часу. Зачекайте кілька хвилин і повторіть спробу."
+ },
+ "last_active": {
+ "active_date": "Активний {{relativeDate}}",
+ "active_now": "Активний зараз",
+ "inactive_90_days": "Неактивний понад 90 днів"
+ },
+ "nav": {
+ "devices": "Пристрої",
+ "plan": "План",
+ "profile": "Профіль",
+ "sessions": "Сеанси",
+ "settings": "Налаштування"
+ },
+ "not_found_alert_title": "Не знайдено.",
+ "not_logged_in_alert": "Ви не ввійшли в систему.",
+ "oauth2_client_detail": {
+ "details_title": "Інформація про клієнт",
+ "id": "ID клієнта",
+ "name": "Ім'я",
+ "policy": "Політика",
+ "terms": "Умови надання послуг"
+ },
+ "oauth2_session_detail": {
+ "client_details_name": "Ім'я",
+ "client_title": "Інформація про клієнт",
+ "session_details_title": "Сеанс"
+ },
+ "pagination_controls": {
+ "total": "Всього: {{totalCount}}"
+ },
+ "password_change": {
+ "current_password_label": "Поточний пароль",
+ "failure": {
+ "description": {
+ "account_locked": "Ваш обліковий запис заблокований і не може бути відновлений на цей час. Якщо цього не очікується, зверніться до адміністратора сервера.",
+ "expired_recovery_ticket": "Посилання для відновлення застаріло. Розпочніть процес відновлення облікового запису спочатку.",
+ "invalid_new_password": "Обраний вами новий пароль неприпустимий; він може не відповідати налаштованій політиці безпеки.",
+ "no_current_password": "У вас немає поточного пароля.",
+ "no_such_recovery_ticket": "Посилання для відновлення недійсне. Якщо ви скопіювали посилання з електронної пошти для відновлення, перевірте, чи скопійовано повне посилання.",
+ "password_changes_disabled": "Зміна пароля вимкнена.",
+ "recovery_ticket_already_used": "Посилання для відновлення вже використано. Його не можна використовувати повторно.",
+ "unspecified": "Це може бути тимчасова проблема, тому спробуйте пізніше. Якщо проблема не зникає, зверніться до адміністратора свого сервера.",
+ "wrong_password": "Пароль, який ви вказали як свій поточний пароль, неправильний. Спробуйте ще раз."
+ }, + "title": "Не вдалося оновити пароль" + }, + "new_password_again_label": "Введіть новий пароль ще раз", + "new_password_label": "Новий пароль", + "passwords_match": "Паролі збігаються!", + "passwords_no_match": "Паролі не збігаються", + "subtitle": "Виберіть новий пароль для свого облікового запису.", + "success": { + "description": "Ваш пароль успішно оновлено.", + "title": "Пароль оновлено" + }, + "title": "Змініть свій пароль" + }, + "password_reset": { + "consumed": { + "subtitle": "Щоб створити новий пароль, почніть спочатку і виберіть «Забули пароль».", + "title": "Посилання для скидання пароля вже використано" + }, + "expired": { + "resend_email": "Повторно надіслати електронний лист", + "subtitle": "Запит на новий електронний лист, який буде надіслано на адресу: {{email}}", + "title": "Посилання для скидання пароля застаріло" + }, + "subtitle": "Виберіть новий пароль для свого облікового запису.", + "title": "Скидання пароля" + }, + "password_strength": { + "placeholder": "Надійність пароля", + "score": { + "0": "Надзвичайно слабкий пароль", + "1": "Дуже слабкий пароль", + "2": "Слабкий пароль", + "3": "Надійний пароль", + "4": "Дуже надійний пароль" + }, + "suggestion": { + "all_uppercase": "Використайте великі букви, але не всі.", + "another_word": "Додайте більше менш вживаних слів.", + "associated_years": "Уникайте років, які пов'язані з вами.", + "capitalization": "Використайте більше великих букв, не лише першу.", + "dates": "Уникайте дат і років, які пов'язані з вами.", + "l33t": "Уникайте передбачуваних замін букв, таких як «@» замість «a».", + "longer_keyboard_pattern": "Використовуйте довші патерни клавіатури та змінюйте напрямок друку кілька разів.", + "no_need": "Ви можете створювати надійні паролі не вживаючи символів, цифр або великих букв.", + "pwned": "Якщо ви використовуєте цей пароль ще десь, вам слід змінити його.", + "recent_years": "Уникайте останніх років.", + "repeated": "Уникайте повторювання слів і символів.", + 
"reverse_words": "Уникайте зворотного написання звичайних слів.", + "sequences": "Уникайте типових послідовностей символів.", + "use_words": "Використовуйте кілька слів, але уникайте поширених фраз." + }, + "too_weak": "Цей пароль занадто слабкий", + "warning": { + "common": "Це часто використовуваний пароль.", + "common_names": "Поширені імена та прізвища легко вгадати.", + "dates": "Дати легко вгадати.", + "extended_repeat": "Повторювані шаблони символів, такі як \"abcabcabc\", легко вгадати.", + "key_pattern": "Короткі послідовності клавіш легко вгадати.", + "names_by_themselves": "Поодинокі імена або прізвища легко вгадати.", + "pwned": "Ваш пароль розкрито внаслідок витоку даних в інтернеті.", + "recent_years": "Пароль із нещодавніми роками легко вгадати.", + "sequences": "Поширені послідовності символів, такі як «abc», легко вгадати.", + "similar_to_common": "Це схоже на часто використовуваний пароль.", + "simple_repeat": "Повторювані символи, такі як «ааа», легко вгадати.", + "straight_row": "Прямі послідовності клавіш на клавіатурі легко вгадати.", + "top_hundred": "Це часто використовуваний пароль.", + "top_ten": "Це широко використовуваний пароль.", + "user_inputs": "Не повинно бути жодних особистих даних або даних, пов'язаних зі сторінкою.", + "word_by_itself": "Окремі слова легко вгадати." + } + }, + "reset_cross_signing": { + "button": "Скинути ідентичність", + "cancelled": { + "description_1": "Ви можете закрити це вікно та повернутися до застосунку, щоб продовжити.", + "description_2": "Якщо ви не ввійшли в обліковий запис і не пам'ятаєте код відновлення, вам усе одно доведеться скинути свої налаштування.", + "heading": "Скидання ідентичності скасовано." 
+ }, + "description": "Якщо ви не ввійшли в обліковий запис на інших пристроях і втратили ключ відновлення, вам потрібно буде скинути свою ідентичність, щоб продовжити користуватися застосунком.", + "effect_list": { + "negative_1": "Ви втратите наявну історію повідомлень", + "negative_2": "Вам потрібно буде знову верифікувати всі наявні пристрої та контакти", + "neutral_1": "Ви втратите історію повідомлень, яка зберігається лише на сервері", + "neutral_2": "Вам потрібно буде знову верифікувати всі наявні пристрої та контакти", + "positive_1": "Ваші дані облікового запису, контакти, налаштування та список бесід будуть збережені" + }, + "failure": { + "description": "Це може бути тимчасова проблема, тому спробуйте пізніше. Якщо проблема не зникає, зверніться до адміністратора свого сервера.", + "heading": "Не вдалося дозволити скидання криптоідентичності", + "title": "Не вдалося дозволити криптоідентичність" + }, + "finish_reset": "Завершити скидання", + "heading": "Скиньте свій обліковий запис, якщо не можете підтвердити його іншим способом", + "start_reset": "Почати скидання", + "success": { + "description": "Скидання профілю схвалено на наступні {{minutes}} хвилин. Ви можете закрити це вікно та повернутися до застосунку, щоб продовжити.", + "heading": "Облікові дані успішно скинуто. Поверніться до застосунку, щоб завершити процес.", + "title": "Скидання криптоідентичності тимчасово дозволено" + }, + "warning": "Скидайте свій обліковий запис, тільки якщо у вас немає доступу до іншого пристрою, на якому ви ввійшли в систему, і ви загубили ключ відновлення." 
+ }, + "selectable_session": { + "label": "Вибрати сеанс" + }, + "session": { + "client_id_label": "ID клієнта", + "current": "Поточний", + "current_badge": "Поточний", + "device_id_label": "ID пристрою", + "finished_date": "Завершено ", + "finished_label": "Завершено", + "generic_browser_session": "Сеанс браузера", + "id_label": "ID", + "ip_label": "IP-адреса", + "last_active_label": "Остання активність", + "last_auth_label": "Остання автентифікація", + "name_for_platform": "{{name}} для {{platform}}", + "scopes_label": "Області застосування (Scopes)", + "set_device_name": { + "help": "Вкажіть назву, яка допоможе вам ідентифікувати цей пристрій.", + "label": "Назва пристрою", + "title": "Змінити назву пристрою" + }, + "signed_in_date": "Вхід виконано ", + "signed_in_label": "Вхід виконано", + "title": "Деталі пристрою", + "unknown_browser": "Невідомий браузер", + "unknown_device": "Невідомий пристрій", + "uri_label": "Uri", + "user_id_label": "ID користувача", + "username_label": "Ім'я користувача" + }, + "session_detail": { + "alert": { + "button": "Назад", + "text": "Цього сеансу не існує або він більше не активний.", + "title": "Не вдалося знайти сеанс: {{deviceId}}" + } + }, + "unknown_route": "Невідомий роут {{route}}", + "unverified_email_alert": { + "button": "Переглянути та підтвердити", + "text:one": "У вас є {{count}} непідтверджена адреса електронної пошти.", + "text:few": "У вас є {{count}} непідтверджені адреси електронної пошти.", + "text:many": "У вас є {{count}} непідтверджених адрес електронної пошти.", + "title": "Непідтверджена електронна адреса" + }, + "user_email": { + "cant_delete_primary": "Виберіть іншу основну електронну адресу, щоб видалити цю.", + "delete_button_confirmation_modal": { + "action": "Видалити електронну пошту", + "body": "Видалити цю електронну пошту?", + "incorrect_password": "Неправильний пароль. 
Повторіть спробу.", + "password_confirmation": "Підтвердьте пароль свого облікового запису, щоб видалити цю адресу електронної пошти" + }, + "delete_button_title": "Видалити електронну адресу", + "email": "Електронна пошта", + "make_primary_button": "Зробити основною", + "not_verified": "Не підтверджено", + "primary_email": "Основна електронна адреса", + "retry_button": "Надіслати код повторно", + "unverified": "Неперевірена" + }, + "user_email_list": { + "heading": "Електронні адреси", + "no_primary_email_alert": "Немає основної адреси електронної пошти" + }, + "user_greeting": { + "error": "Не вдалося завантажити користувача" + }, + "user_name": { + "display_name_field_label": "Псевдонім" + }, + "user_sessions_overview": { + "active_sessions:one": "{{count}} активний сеанс", + "active_sessions:few": "{{count}} активні сеанси", + "active_sessions:many": "{{count}} активних сеансів", + "heading": "Де ви ввійшли", + "no_active_sessions": { + "default": "Ви не ввійшли в жоден застосунок.", + "inactive_90_days": "Усі ваші сеанси були активними протягом останніх 90 днів." + } + }, + "verify_email": { + "code_expired_alert": { + "description": "Термін дії коду закінчився. 
Будь ласка, надішліть запит на новий код.", + "title": "Код застарів" + }, + "code_field_error": "Код не розпізнано", + "code_field_label": "6-значний код", + "code_field_wrong_shape": "Код повинен складатися з 6 цифр", + "email_sent_alert": { + "description": "Введіть новий код нижче.", + "title": "Новий код надіслано" + }, + "enter_code_prompt": "Введіть 6-значний код, надісланий на адресу: {{email}} ", + "heading": "Підтвердьте свою електронну адресу", + "invalid_code_alert": { + "description": "Перевірте код, надісланий на вашу електронну пошту, та оновіть поля нижче, щоб продовжити.", + "title": "Ви ввели неправильний код" + }, + "resend_code": "Надіслати код повторно", + "resend_email": "Повторно надіслати електронний лист", + "sent": "Надіслано!", + "unknown_email": "Невідома електронна пошта" + } + }, + "mas": { + "scope": { + "edit_profile": "Редагування профілю та контактних даних", + "manage_sessions": "Керування пристроями та сеансами", + "mas_admin": "Керувати користувачами (urn:mas:admin)", + "send_messages": "Надсилати нові повідомлення від вашого імені", + "synapse_admin": "Адмініструвати домашній сервер (urn:synapse:admin:*)", + "view_messages": "Перегляд наявних повідомлень і даних", + "view_profile": "Перегляд інформації профілю та контактних даних" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/locales/zh-Hans.json b/matrix-authentication-service/frontend/locales/zh-Hans.json new file mode 100644 index 00000000..ec5b8534 --- /dev/null +++ b/matrix-authentication-service/frontend/locales/zh-Hans.json @@ -0,0 +1,398 @@ +{ + "action": { + "back": "返回", + "cancel": "取消", + "clear": "清除", + "close": "关闭", + "collapse": "折叠", + "confirm": "确认", + "continue": "继续", + "edit": "编辑", + "expand": "展开", + "save": "保存", + "save_and_continue": "保存并继续", + "sign_out": "注销", + "start_over": "重新开始" + }, + "branding": { + "privacy_policy": { + "alt": "服务隐私政策链接", + "link": "隐私政策" + }, + "terms_and_conditions": { + "alt": 
"服务条款和条件链接", + "link": "条款与条件" + } + }, + "common": { + "add": "添加", + "e2ee": "端到端加密", + "error": "错误", + "loading": "加载中...", + "next": "下一页", + "password": "密码", + "previous": "上一页", + "saved": "已保存", + "saving": "正在保存..." + }, + "frontend": { + "account": { + "account_password": "账户密码", + "contact_info": "联系方式", + "delete_account": { + "alert_description": "此账户将被永久删除,你将无法再访问任何消息。", + "alert_title": "你将丢失所有数据", + "button": "删除账户", + "dialog_description": "确认你想删除账户:\n\n\n你将无法重新激活账户\n你将无法再登录\n包括你在内,没有人能重复使用此用户名(MXID)\n你将离开所有房间与私聊\n你将被从身份服务器中移除,遂没有人能通过你的邮件地址或电话号码找到你\n\n收到过你曾经的消息的人员仍然能看到你的历史消息。是否向未来加入房间的人员隐藏你发送的消息?", + "dialog_title": "删除此账户?", + "erase_checkbox_label": "是,对新加入者隐藏我的所有消息", + "incorrect_password": "密码不正确,请重试", + "mxid_label": "确认你的 Matrix ID({{ mxid }})", + "mxid_mismatch": "此值与你的 Matrix ID 不匹配", + "password_label": "输入密码以继续" + }, + "edit_profile": { + "display_name_help": "这是其他人在您登录的地方将看到的信息。", + "display_name_label": "显示名称", + "title": "编辑个人资料", + "username_label": "用户名" + }, + "password": { + "change": "更改密码", + "change_disabled": "管理员禁止更改密码。", + "label": "密码" + }, + "sign_out": { + "button": "注销登录", + "dialog": "注销此账户?" 
+ }, + "title": "你的账户" + }, + "add_email_form": { + "email_denied_alert": { + "text": "服务器策略不允许输入的电子邮件地址。", + "title": "电子邮件地址被策略拒绝" + }, + "email_denied_error": "服务器策略不允许输入的邮箱地址。", + "email_exists_alert": { + "text": "输入的电子邮件地址已添加到此账户", + "title": "电子邮件地址已经存在" + }, + "email_exists_error": "输入的电子邮件地址已添加到此账户", + "email_field_help": "添加可用于访问此账户的备用电子邮件地址。", + "email_field_label": "添加电子邮件地址", + "email_in_use_error": "输入的邮件地址已被使用", + "email_invalid_alert": { + "text": "输入的电子邮件地址无效", + "title": "无效的电子邮件地址" + }, + "email_invalid_error": "输入的电子邮件地址无效", + "incorrect_password_error": "密码不正确,请重试", + "password_confirmation": "确认账户密码以添加此邮件地址" + }, + "app_sessions_list": { + "error": "加载应用程序会话失败", + "heading": "应用" + }, + "browser_session_details": { + "current_badge": "当前", + "session_details_title": "会话" + }, + "browser_sessions_overview": { + "body:other": "{{count}}活跃会话", + "heading": "浏览器", + "no_active_sessions": { + "default": "你尚未登录任何 Web 浏览器。", + "inactive_90_days": "你的所有会话在过去 90 天内均处于活跃状态。" + }, + "view_all_button": "查看全部" + }, + "compat_session_detail": { + "client_details_title": "客户端", + "name": "名称", + "session_details_title": "会话" + }, + "device_type_icon_label": { + "desktop": "桌面", + "mobile": "手机", + "pc": "计算机", + "tablet": "平板电脑", + "unknown": "未知设备类型", + "web": "网页" + }, + "email_in_use": { + "heading": "此邮件地址 {{email}} 已被使用。" + }, + "end_session_button": { + "confirmation_modal_title": "你确定要结束这个会话吗?", + "text": "移除设备" + }, + "error": { + "hideDetails": "隐藏详细信息", + "showDetails": "显示详细信息", + "subtitle": "出现意外错误。请重试。", + "title": "出了点问题" + }, + "error_boundary_title": "出了点问题", + "errors": { + "field_required": "此字段为必填项", + "rate_limit_exceeded": "你在短时间内发出了过多请求。请于几分钟后重试。" + }, + "last_active": { + "active_date": "活跃 {{relativeDate}}", + "active_now": "活跃", + "inactive_90_days": "已停用90天以上" + }, + "nav": { + "devices": "设备", + "plan": "Plan", + "profile": "个人资料", + "sessions": "会话", + "settings": "设置" + }, + "not_found_alert_title": "未找到。", + 
"not_logged_in_alert": "尚未登录。", + "oauth2_client_detail": { + "details_title": "客户端", + "id": "客户端ID", + "name": "名称", + "policy": "策略", + "terms": "服务条款" + }, + "oauth2_session_detail": { + "client_details_name": "名称", + "client_title": "客户端", + "session_details_title": "会话" + }, + "pagination_controls": { + "total": "总计:{{totalCount}}" + }, + "password_change": { + "current_password_label": "当前密码", + "failure": { + "description": { + "account_locked": "你的账户已被锁定,暂时无法恢复。如果这并非预期,请联系服务器管理员。", + "expired_recovery_ticket": "恢复链接已过期。请从头重新开始账户恢复流程。", + "invalid_new_password": "你指定的新密码无效;它可能不符合安全策略配置。", + "no_current_password": "你当前没有密码。", + "no_such_recovery_ticket": "恢复链接无效。如果链接是从恢复电子邮件中复制的,请检查复制的链接是否完整。", + "password_changes_disabled": "密码更改已禁用。", + "recovery_ticket_already_used": "恢复链接已被使用。无法再次使用。", + "unspecified": "这可能是暂时的问题,请稍后再试。如果问题仍然存在,请联系服务器管理员。", + "wrong_password": "您提供的当前密码不正确。请再试一次。" + }, + "title": "更新密码失败" + }, + "new_password_again_label": "再次输入新密码", + "new_password_label": "新密码", + "passwords_match": "密码匹配!", + "passwords_no_match": "密码不匹配", + "subtitle": "为账户指定一个新密码。", + "success": { + "description": "密码已成功更新。", + "title": "密码已更新" + }, + "title": "更改密码" + }, + "password_reset": { + "consumed": { + "subtitle": "要创建新密码,请重新开始并选择“忘记密码”。", + "title": "重置密码的链接已被使用" + }, + "expired": { + "resend_email": "重新发送电子邮件", + "subtitle": "请求发送新邮件到:{{email}}", + "title": "重置密码的链接已过期" + }, + "subtitle": "为账户指定一个新密码。", + "title": "重置密码" + }, + "password_strength": { + "placeholder": "密码强度", + "score": { + "0": "极弱的密码", + "1": "非常弱的密码", + "2": "弱密码", + "3": "强密码", + "4": "非常强的密码" + }, + "suggestion": { + "all_uppercase": "将部分字母大写,但不是全部字母大写。", + "another_word": "添加更多不常用的单词。", + "associated_years": "避开与你有关的年份。", + "capitalization": "不止首字母需要大写。", + "dates": "避免使用与你有关的日期与年份。", + "l33t": "避免可预见的字母替换,例如用\"@\"替换 \"a\"。", + "longer_keyboard_pattern": "使用较长的键盘模式,并多次改变输入方向。", + "no_need": "不使用符号、数字或大写字母也能创建强密码。", + "pwned": "如果你在其它地方使用该密码,则应进行更改。", + "recent_years": "避免近几年。", + 
"repeated": "避免重复的词语和字符。", + "reverse_words": "避免使用常用单词的反向拼写。", + "sequences": "避免使用常见的字符序列。", + "use_words": "使用多个单词,但避免使用常用短语。" + }, + "too_weak": "密码太弱", + "warning": { + "common": "这是一个常用密码。", + "common_names": "常见的名字和姓氏很容易被猜到。", + "dates": "日期很容易被猜到。", + "extended_repeat": "像“abcabcabc”这样的重复字符模式很容易被猜到。", + "key_pattern": "短键盘模式很容易被猜到。", + "names_by_themselves": "单个名字或姓氏很容易被猜到。", + "pwned": "你的密码因 Internet 上的数据泄露而被泄露。", + "recent_years": "最近几年很容易被猜到。", + "sequences": "像“abc”这样的常见字符序列很容易被猜到。", + "similar_to_common": "这与常用密码类似。", + "simple_repeat": "像“aaa”这样的重复字符很容易被猜到。", + "straight_row": "键盘上成排的按键很容易被猜到。", + "top_hundred": "这是一个常用密码。", + "top_ten": "这是一个使用率很高的密码。", + "user_inputs": "不应有任何个人或页面相关数据。", + "word_by_itself": "单个单词很容易被猜到。" + } + }, + "reset_cross_signing": { + "button": "重置身份", + "cancelled": { + "description_1": "你可以关闭此窗口并返回到 App 以继续。", + "description_2": "若你在任何地方都已注销并且忘记恢复代码,你仍然需要重置身份。", + "heading": "身份重置流程已被取消。" + }, + "description": "如果你没有在其它地方登录,并且忘记或丢失了恢复密钥,则需要重置加密身份才能继续使用 app。", + "effect_list": { + "negative_1": "你将丢失现有消息历史", + "negative_2": "你将需要再次验证所有现有设备与联系人", + "neutral_1": "你将丢失仅存储在服务器上的消息历史", + "neutral_2": "你将需要再次验证所有现有设备与联系人", + "positive_1": "你的账户的详细信息、联系人、偏好与聊天列表都将被保留" + }, + "failure": { + "description": "这可能是暂时的问题,请稍后再试。如果问题仍然存在,请联系服务器管理员。", + "heading": "加密身份重置授权失败。", + "title": "无法允许加密身份" + }, + "finish_reset": "完成重置", + "heading": "如果你无法通过其它方式确认请重置身份", + "start_reset": "开始重置", + "success": { + "description": "身份重置已获批准,有效时间为 {{minutes}} 分钟。您可以关闭此窗口并返回应用继续操作。", + "heading": "已成功重置身份。返回到 App 以完成此流程。", + "title": "临时允许重置加密身份" + }, + "warning": "仅当你无法访问其它已登录的设备并且丢失了恢复密钥时才重置身份。" + }, + "selectable_session": { + "label": "选择会话" + }, + "session": { + "client_id_label": "客户端ID", + "current": "当前", + "current_badge": "当前", + "device_id_label": "设备 ID", + "finished_date": "已完成 ", + "finished_label": "已完成", + "generic_browser_session": "浏览器会话", + "id_label": "ID", + "ip_label": "IP 地址", + "last_active_label": "最后活动", + "last_auth_label": 
"最后认证",
+ "name_for_platform": "{{platform}} 版 {{name}}",
+ "scopes_label": "范围",
+ "set_device_name": {
+ "help": "设置一个名称有助于识别此设备。",
+ "label": "设备名称",
+ "title": "编辑设备名称"
+ },
+ "signed_in_date": "已登录",
+ "signed_in_label": "已登录",
+ "title": "设备详情",
+ "unknown_browser": "未知浏览器",
+ "unknown_device": "未知设备",
+ "uri_label": "Uri",
+ "user_id_label": "用户 ID",
+ "username_label": "用户名"
+ },
+ "session_detail": {
+ "alert": {
+ "button": "返回",
+ "text": "该会话不存在,或不再处于活动状态。",
+ "title": "无法找到会话:{{deviceId}}"
+ }
+ },
+ "unknown_route": "未知路由 {{route}}",
+ "unverified_email_alert": {
+ "button": "审查并验证",
+ "text:other": "你有 {{count}} 个未经验证的邮件地址。",
+ "title": "未经验证的电子邮件地址"
+ },
+ "user_email": {
+ "cant_delete_primary": "选择其他主电子邮件地址,删除该电子邮件地址。",
+ "delete_button_confirmation_modal": {
+ "action": "删除电子邮件地址",
+ "body": "确定要删除此邮件地址?",
+ "incorrect_password": "密码不正确,请重试",
+ "password_confirmation": "确认账户密码以删除此邮件地址"
+ },
+ "delete_button_title": "删除电子邮件地址",
+ "email": "电子邮件地址",
+ "make_primary_button": "设为主要",
+ "not_verified": "未验证",
+ "primary_email": "主电子邮件地址",
+ "retry_button": "重新发送代码",
+ "unverified": "未验证"
+ },
+ "user_email_list": {
+ "heading": "电子邮件地址",
+ "no_primary_email_alert": "没有主电子邮件地址"
+ },
+ "user_greeting": {
+ "error": "加载用户失败"
+ },
+ "user_name": {
+ "display_name_field_label": "显示名称"
+ },
+ "user_sessions_overview": {
+ "active_sessions:other": "{{count}} 个活跃会话",
+ "heading": "你已登录的位置",
+ "no_active_sessions": {
+ "default": "你尚未登录任何 app。",
+ "inactive_90_days": "你的所有会话在过去 90 天内均处于活跃状态。"
+ }
+ },
+ "verify_email": {
+ "code_expired_alert": {
+ "description": "此代码已过期,请重新请求新代码。",
+ "title": "代码已过期"
+ },
+ "code_field_error": "无法识别代码",
+ "code_field_label": "6位数代码",
+ "code_field_wrong_shape": "代码必须是6位数字",
+ "email_sent_alert": {
+ "description": "请在下方输入新代码。",
+ "title": "新代码已发送"
+ },
+ "enter_code_prompt": "输入发送至以下地址的6位数代码:{{email}}",
+ "heading": "验证邮箱",
+ "invalid_code_alert": {
+ "description": "检查发送到你的邮件地址中的代码,并更新以下字段以继续。",
+ "title": "你输入的代码错误"
+ },
+ 
"resend_code": "重新发送代码", + "resend_email": "重新发送电子邮件", + "sent": "已发送!", + "unknown_email": "未知的电子邮件地址" + } + }, + "mas": { + "scope": { + "edit_profile": "编辑个人资料和联系方式", + "manage_sessions": "管理设备和会话", + "mas_admin": "管理 matrix-authentication-service 上的用户", + "send_messages": "以你的名义发送新消息", + "synapse_admin": "管理 Synapse 服务器", + "view_messages": "查看现有信息和数据", + "view_profile": "查看个人资料信息和联系方式" + } + } +} \ No newline at end of file diff --git a/matrix-authentication-service/frontend/package-lock.json b/matrix-authentication-service/frontend/package-lock.json new file mode 100644 index 00000000..78c33a11 --- /dev/null +++ b/matrix-authentication-service/frontend/package-lock.json @@ -0,0 +1,13509 @@ +{ + "name": "mas-frontend", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "mas-frontend", + "version": "0.0.0", + "dependencies": { + "@fontsource/inconsolata": "^5.2.8", + "@fontsource/inter": "^5.2.8", + "@radix-ui/react-collapsible": "^1.1.12", + "@radix-ui/react-dialog": "^1.1.15", + "@tanstack/react-query": "^5.90.21", + "@tanstack/react-router": "^1.150.0", + "@vector-im/compound-design-tokens": "6.4.3", + "@vector-im/compound-web": "^8.3.5", + "@zxcvbn-ts/core": "^3.0.4", + "@zxcvbn-ts/language-common": "^3.0.4", + "classnames": "^2.5.1", + "date-fns": "^4.1.0", + "i18next": "^25.8.10", + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-i18next": "^16.5.4", + "swagger-ui-dist": "^5.31.0", + "valibot": "^1.2.0", + "vaul": "^1.1.2" + }, + "devDependencies": { + "@biomejs/biome": "^2.3.9", + "@browser-logos/chrome": "^2.0.0", + "@browser-logos/firefox": "^3.0.10", + "@browser-logos/safari": "^2.1.0", + "@graphql-codegen/cli": "^6.1.1", + "@graphql-codegen/client-preset": "^5.2.2", + "@graphql-codegen/typescript-msw": "^3.0.1", + "@storybook/addon-docs": "^10.2.9", + "@storybook/react-vite": "^10.2.9", + "@tanstack/react-query-devtools": "^5.91.3", + "@tanstack/react-router-devtools": "^1.150.0", + 
"@tanstack/router-plugin": "^1.150.0", + "@testing-library/jest-dom": "^6.9.1", + "@testing-library/react": "^16.3.1", + "@testing-library/user-event": "^14.6.1", + "@types/node": "^25.2.3", + "@types/react": "19.2.14", + "@types/react-dom": "19.2.3", + "@types/swagger-ui-dist": "^3.30.6", + "@vitejs/plugin-react": "^5.1.4", + "@vitest/coverage-v8": "^4.0.18", + "autoprefixer": "^10.4.23", + "browserslist-to-esbuild": "^2.1.1", + "graphql": "^16.12.0", + "happy-dom": "^20.5.0", + "i18next-cli": "^1.42.9", + "knip": "^5.81.0", + "msw": "^2.12.8", + "msw-storybook-addon": "^2.0.6", + "postcss": "^8.5.6", + "postcss-import": "^16.1.1", + "postcss-nesting": "^14.0.0", + "rimraf": "^6.1.2", + "storybook": "^10.1.11", + "tailwindcss": "^3.4.19", + "tinyglobby": "^0.2.15", + "typescript": "^5.9.3", + "vite": "7.3.1", + "vite-plugin-graphql-codegen": "^3.8.0", + "vitest": "^4.0.15" + } + }, + "node_modules/@adobe/css-tools": { + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz", + "integrity": "sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ardatan/relay-compiler": { + "version": "12.0.3", + "resolved": "https://registry.npmjs.org/@ardatan/relay-compiler/-/relay-compiler-12.0.3.tgz", + "integrity": "sha512-mBDFOGvAoVlWaWqs3hm1AciGHSQE1rqFc/liZTyYz/Oek9yZdT5H26pH2zAFuEiTiBVPPyMuqf5VjOFPI2DGsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/generator": "^7.26.10", + "@babel/parser": "^7.26.10", + 
"@babel/runtime": "^7.26.10", + "chalk": "^4.0.0", + "fb-watchman": "^2.0.0", + "immutable": "~3.7.6", + "invariant": "^2.2.4", + "nullthrows": "^1.1.1", + "relay-runtime": "12.0.0", + "signedsource": "^1.0.0" + }, + "bin": { + "relay-compiler": "bin/relay-compiler" + }, + "peerDependencies": { + "graphql": "*" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", 
+ "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.5.tgz", + "integrity": "sha512-q3WC4JfdODypvxArsJQROfupPBq9+lMwjKq7C33GhbFYJsufD0yd/ziwD+hJucLeWsnFPWZjsU2DNFqBPE7jwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.28.5", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.5", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", + "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": 
"sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", + "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", + "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", + "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", + "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-proposal-class-properties": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", + "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-class-properties instead.", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-object-rest-spread": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz", + "integrity": "sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-object-rest-spread instead.", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.20.5", + "@babel/helper-compilation-targets": "^7.20.7", + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.20.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-flow": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.27.1.tgz", + "integrity": "sha512-p9OkPbZ5G7UT1MofwYFigGebnrzGJacoBSQM0/6bi/PUMVE+qlWDD/OalvQKbwgQzU6dl0xAv6r4X7Jme0RYxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz", + "integrity": "sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", + "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", + "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", + "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + 
} + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz", + "integrity": "sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.5.tgz", + "integrity": "sha512-45DmULpySVvmq9Pj3X9B+62Xe+DJGov27QravQJU1LLcapR6/10i+gYVAucGGJpHBp5mYxIMK4nDAT/QDLr47g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.4.tgz", + "integrity": "sha512-cFOlhIYPBv/iBoc+KS3M6et2XPtbT2HiCRfBXWtfpc9OAyostldxIf9YAYB6ypURBBbx+Qv6nyrLzASfJe+hBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-globals": "^7.28.0", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/traverse": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz", + "integrity": "sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/template": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.5.tgz", + "integrity": "sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-flow-strip-types": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.27.1.tgz", + "integrity": "sha512-G5eDKsu50udECw7DL2AcsysXiQyB7Nfg521t2OAJ4tbfTJ27doHLeF/vlI1NZGlLdbb/v+ibvtL1YBQqYOwJGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-syntax-flow": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz", + "integrity": "sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": 
"^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz", + "integrity": "sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz", + "integrity": "sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz", + "integrity": "sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz", + "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz", + "integrity": "sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.27.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.7.tgz", + "integrity": "sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz", + "integrity": "sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, 
+ "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-display-name": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.28.0.tgz", + "integrity": "sha512-D6Eujc2zMxKjfa4Zxl4GHMsmhKKZ9VpcqIchJLvwTxad9zWIYulwYItBovpDOoNLISpcZSXoDJ5gaGbQUDqViA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.27.1.tgz", + "integrity": "sha512-2KH4LWGSrJIkVf5tSiBFYuXDAoWRq2MMwgivCf+93dd0GQi8RXLjKA/0EvRnVV5G0hrHczsquXuD01L8s6dmBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz", + "integrity": "sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz", + "integrity": "sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz", + "integrity": "sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { 
+ "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz", + "integrity": "sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@biomejs/biome": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.9.tgz", + "integrity": "sha512-js+34KpnY65I00k8P71RH0Uh2rJk4BrpxMGM5m2nBfM9XTlKE5N1URn5ydILPRyXXq4ebhKCjsvR+txS+D4z2A==", + "dev": true, + "license": "MIT OR Apache-2.0", + "bin": { + "biome": "bin/biome" + }, + "engines": { + "node": ">=14.21.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/biome" + }, + "optionalDependencies": { + "@biomejs/cli-darwin-arm64": "2.3.9", + "@biomejs/cli-darwin-x64": "2.3.9", + "@biomejs/cli-linux-arm64": "2.3.9", + "@biomejs/cli-linux-arm64-musl": "2.3.9", + "@biomejs/cli-linux-x64": "2.3.9", + "@biomejs/cli-linux-x64-musl": "2.3.9", + "@biomejs/cli-win32-arm64": "2.3.9", + "@biomejs/cli-win32-x64": "2.3.9" + } + }, + "node_modules/@biomejs/cli-darwin-arm64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.9.tgz", + "integrity": "sha512-hHbYYnna/WBwem5iCpssQQLtm5ey8ADuDT8N2zqosk6LVWimlEuUnPy6Mbzgu4GWVriyL5ijWd+1zphX6ll4/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-darwin-x64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.9.tgz", + "integrity": "sha512-sKMW5fpvGDmPdqCchtVH5MVlbVeSU3ad4CuKS45x8VHt3tNSC8CZ2QbxffAOKYK9v/mAeUiPC6Cx6+wtyU1q7g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + 
"node_modules/@biomejs/cli-linux-arm64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.9.tgz", + "integrity": "sha512-BXBB6HbAgZI6T6QB8q6NSwIapVngqArP6K78BqkMerht7YjL6yWctqfeTnJm0qGF2bKBYFexslrbV+VTlM2E6g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64-musl": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.9.tgz", + "integrity": "sha512-JOHyG2nl8XDpncbMazm1uBSi1dPX9VbQDOjKcfSVXTqajD0PsgodMOKyuZ/PkBu5Lw877sWMTGKfEfpM7jE7Cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.9.tgz", + "integrity": "sha512-PjYuv2WLmvf0WtidxAkFjlElsn0P6qcvfPijrqu1j+3GoW3XSQh3ywGu7gZ25J25zrYj3KEovUjvUZB55ATrGw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64-musl": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.9.tgz", + "integrity": "sha512-FUkb/5beCIC2trpqAbW9e095X4vamdlju80c1ExSmhfdrojLZnWkah/XfTSixKb/dQzbAjpD7vvs6rWkJ+P07Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-arm64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.9.tgz", + "integrity": 
"sha512-w48Yh/XbYHO2cBw8B5laK3vCAEKuocX5ItGXVDAqFE7Ze2wnR00/1vkY6GXglfRDOjWHu2XtxI0WKQ52x1qxEA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-x64": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.9.tgz", + "integrity": "sha512-90+J63VT7qImy9s3pkWL0ZX27VzVwMNCRzpLpe5yMzMYPbO1vcjL/w/Q5f/juAGMvP7a2Fd0H7IhAR6F7/i78A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@browser-logos/chrome": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@browser-logos/chrome/-/chrome-2.0.0.tgz", + "integrity": "sha512-JDr78z/3x4zTtBgZ0bunFq9D4QhjT5gPBy90rpJQ5VlJugK/7xGFUgwUIkVT5vfU7lTi2F0wB/5XeOPrV67ErA==", + "dev": true + }, + "node_modules/@browser-logos/firefox": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@browser-logos/firefox/-/firefox-3.0.10.tgz", + "integrity": "sha512-Bys3b02btUOvZyt02fFXfqPiNhYX6ukCtdvs4ERCSYnf6BVqCqeBR8niRYgyaECTdZwzqNCaUUKwQ0JX7lV1/A==", + "dev": true + }, + "node_modules/@browser-logos/safari": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@browser-logos/safari/-/safari-2.1.0.tgz", + "integrity": "sha512-diidPiK62E4hlAh0dyLfWQDZXi2SSAGiOuw6iqD1x8ztw7L/Sz3He46FhcxEzYa1hKi1blCkjnKDjqw6rQfgcA==", + "dev": true + }, + "node_modules/@croct/json": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@croct/json/-/json-2.1.0.tgz", + "integrity": "sha512-UrWfjNQVlBxN+OVcFwHmkjARMW55MBN04E9KfGac8ac8z1QnFVuiOOFtMWXCk3UwsyRqhsNaFoYLZC+xxqsVjQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@croct/json5-parser": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/@croct/json5-parser/-/json5-parser-0.2.2.tgz", + "integrity": 
"sha512-0NJMLrbeLbQ0eCVj3UoH/kG2QckUgOASfwmfDTjyW1xAYPyTNJXcWVT/dssJdTJd0pRchW+qF0VFWQHcxs1OVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@croct/json": "^2.1.0" + } + }, + "node_modules/@csstools/selector-resolve-nested": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-resolve-nested/-/selector-resolve-nested-4.0.0.tgz", + "integrity": "sha512-9vAPxmp+Dx3wQBIUwc1v7Mdisw1kbbaGqXUM8QLTgWg7SoPGYtXBsMXvsFs/0Bn5yoFhcktzxNZGNaUt0VjgjA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0", + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "postcss-selector-parser": "^7.1.1" + } + }, + "node_modules/@csstools/selector-specificity": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-6.0.0.tgz", + "integrity": "sha512-4sSgl78OtOXEX/2d++8A83zHNTgwCJMaR24FvsYL7Uf/VS8HZk9PTwR51elTbGqMuwH3szLvvOXEaVnqn0Z3zA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "license": "MIT-0", + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "postcss-selector-parser": "^7.1.1" + } + }, + "node_modules/@emnapi/core": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.7.1.tgz", + "integrity": "sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.7.1.tgz", + "integrity": 
"sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@envelop/core": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/@envelop/core/-/core-5.4.0.tgz", + "integrity": "sha512-/1fat63pySE8rw/dZZArEVytLD90JApY85deDJ0/34gm+yhQ3k70CloSUevxoOE4YCGveG3s9SJJfQeeB4NAtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@envelop/instrumentation": "^1.0.0", + "@envelop/types": "^5.2.1", + "@whatwg-node/promise-helpers": "^1.2.4", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@envelop/instrumentation": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@envelop/instrumentation/-/instrumentation-1.0.0.tgz", + "integrity": "sha512-cxgkB66RQB95H3X27jlnxCRNTmPuSTgmBAq6/4n2Dtv4hsk4yz8FadA1ggmd0uZzvKqWD6CR+WFgTjhDqg7eyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@whatwg-node/promise-helpers": "^1.2.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@envelop/types": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/@envelop/types/-/types-5.2.1.tgz", + "integrity": "sha512-CsFmA3u3c2QoLDTfEpGr4t25fjMU31nyvse7IzWTvb0ZycuPjMjb0fjlheh+PbhBYb9YLugnT2uY6Mwcg1o+Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@whatwg-node/promise-helpers": "^1.0.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.1", + "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.1.tgz", + "integrity": "sha512-HHB50pdsBX6k47S4u5g/CaLjqS3qwaOVE5ILsq64jyzgMhLuCuZ8rGzM9yhsAjfjkbgUPMzZEPa7DAp7yz6vuA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.1.tgz", + "integrity": "sha512-kFqa6/UcaTbGm/NncN9kzVOODjhZW8e+FRdSeypWe6j33gzclHtwlANs26JrupOntlcWmB0u8+8HZo8s7thHvg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.1.tgz", + "integrity": "sha512-45fuKmAJpxnQWixOGCrS+ro4Uvb4Re9+UTieUY2f8AEc+t7d4AaZ6eUJ3Hva7dtrxAAWHtlEFsXFMAgNnGU9uQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.1.tgz", + "integrity": "sha512-LBEpOz0BsgMEeHgenf5aqmn/lLNTFXVfoWMUox8CtWWYK9X4jmQzWjoGoNb8lmAYml/tQ/Ysvm8q7szu7BoxRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz", + "integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + 
}, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz", + "integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.1.tgz", + "integrity": "sha512-/8Rfgns4XD9XOSXlzUDepG8PX+AVWHliYlUkFI3K3GB6tqbdjYqdhcb4BKRd7C0BhZSoaCxhv8kTcBrcZWP+xg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.1.tgz", + "integrity": "sha512-GITpD8dK9C+r+5yRT/UKVT36h/DQLOHdwGVwwoHidlnA168oD3uxA878XloXebK4Ul3gDBBIvEdL7go9gCUFzQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.1.tgz", + "integrity": "sha512-ieMID0JRZY/ZeCrsFQ3Y3NlHNCqIhTprJfDgSB3/lv5jJZ8FX3hqPyXWhe+gvS5ARMBJ242PM+VNz/ctNj//eA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.1.tgz", + "integrity": "sha512-W9//kCrh/6in9rWIBdKaMtuTTzNj6jSeG/haWBADqLLa9P8O5YSRDzgD5y9QBok4AYlzS6ARHifAb75V6G670Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.1.tgz", + "integrity": "sha512-VIUV4z8GD8rtSVMfAj1aXFahsi/+tcoXXNYmXgzISL+KB381vbSTNdeZHHHIYqFyXcoEhu9n5cT+05tRv13rlw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.1.tgz", + "integrity": "sha512-l4rfiiJRN7sTNI//ff65zJ9z8U+k6zcCg0LALU5iEWzY+a1mVZ8iWC1k5EsNKThZ7XCQ6YWtsZ8EWYm7r1UEsg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.1.tgz", + "integrity": "sha512-U0bEuAOLvO/DWFdygTHWY8C067FXz+UbzKgxYhXC0fDieFa0kDIra1FAhsAARRJbvEyso8aAqvPdNxzWuStBnA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.1.tgz", + "integrity": "sha512-NzdQ/Xwu6vPSf/GkdmRNsOfIeSGnh7muundsWItmBsVpMoNPVpM61qNzAVY3pZ1glzzAxLR40UyYM23eaDDbYQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.1.tgz", + "integrity": 
"sha512-7zlw8p3IApcsN7mFw0O1Z1PyEk6PlKMu18roImfl3iQHTnr/yAfYv6s4hXPidbDoI2Q0pW+5xeoM4eTCC0UdrQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.1.tgz", + "integrity": "sha512-cGj5wli+G+nkVQdZo3+7FDKC25Uh4ZVwOAK6A06Hsvgr8WqBBuOy/1s+PUEd/6Je+vjfm6stX0kmib5b/O2Ykw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz", + "integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.1.tgz", + "integrity": "sha512-wzC24DxAvk8Em01YmVXyjl96Mr+ecTPyOuADAvjGg+fyBpGmxmcr2E5ttf7Im8D0sXZihpxzO1isus8MdjMCXQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.1.tgz", + "integrity": "sha512-1YQ8ybGi2yIXswu6eNzJsrYIGFpnlzEWRl6iR5gMgmsrR0FcNoV1m9k9sc3PuP5rUBLshOZylc9nqSgymI+TYg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.1", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.1.tgz", + "integrity": "sha512-5Z+DzLCrq5wmU7RDaMDe2DVXMRm2tTDvX2KU14JJVBN2CT/qov7XVix85QoJqHltpvAOZUAc3ndU56HSMWrv8g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.1.tgz", + "integrity": "sha512-Q73ENzIdPF5jap4wqLtsfh8YbYSZ8Q0wnxplOlZUOyZy7B4ZKW8DXGWgTCZmF8VWD7Tciwv5F4NsRf6vYlZtqg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.1.tgz", + "integrity": "sha512-ajbHrGM/XiK+sXM0JzEbJAen+0E+JMQZ2l4RR4VFwvV9JEERx+oxtgkpoKv1SevhjavK2z2ReHk32pjzktWbGg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.1.tgz", + "integrity": "sha512-IPUW+y4VIjuDVn+OMzHc5FV4GubIwPnsz6ubkvN8cuhEqH81NovB53IUlrlBkPMEPxvNnf79MGBoz8rZ2iW8HA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.1.tgz", + "integrity": "sha512-RIVRWiljWA6CdVu8zkWcRmGP7iRRIIwvhDKem8UMBjPql2TXM5PkDVvvrzMtj1V+WFPB4K7zkIGM7VzRtFkjdg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.1.tgz", + "integrity": "sha512-2BR5M8CPbptC1AK5JbJT1fWrHLvejwZidKx3UMSF0ecHMa+smhi16drIrCEggkgviBwLYd5nwrFLSl5Kho96RQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.1.tgz", + "integrity": "sha512-d5X6RMYv6taIymSk8JBP+nxv8DQAMY6A51GPgusqLdK9wBz5wWIXy1KjTck6HnjE9hqJzJRdk+1p/t5soSbCtw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@fastify/busboy": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", + "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@floating-ui/core": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", + "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz", + "integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.3", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/react": { + "version": "0.27.16", + "resolved": "https://registry.npmjs.org/@floating-ui/react/-/react-0.27.16.tgz", + "integrity": 
"sha512-9O8N4SeG2z++TSM8QA/KTeKFBVCNEz/AGS7gWPJf6KFRzmRWixFRnCnkPHRDwSVZW6QPDO6uT0P2SpWNKCc9/g==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.1.6", + "@floating-ui/utils": "^0.2.10", + "tabbable": "^6.0.0" + }, + "peerDependencies": { + "react": ">=17.0.0", + "react-dom": ">=17.0.0" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz", + "integrity": "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.4" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", + "license": "MIT" + }, + "node_modules/@fontsource/inconsolata": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/inconsolata/-/inconsolata-5.2.8.tgz", + "integrity": "sha512-lIZW+WOZYpUH91g9r6rYYhfTmptF3YPPM54ZOs8IYVeeL4SeiAu4tfj7mdr8llYEq31DLYgi6JtGIJa192gB0Q==", + "license": "OFL-1.1", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@fontsource/inter": { + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz", + "integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==", + "license": "OFL-1.1", + "funding": { + "url": "https://github.com/sponsors/ayuhito" + } + }, + "node_modules/@graphql-codegen/add": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/add/-/add-6.0.0.tgz", + "integrity": 
"sha512-biFdaURX0KTwEJPQ1wkT6BRgNasqgQ5KbCI1a3zwtLtO7XTo7/vKITPylmiU27K5DSOWYnY/1jfSqUAEBuhZrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/add/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/cli": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-6.1.1.tgz", + "integrity": "sha512-Ni8UdZ6D/UTvLvDtPb6PzshI0lTqtLDnmv/2t1w2SYP92H0MMEdAzxB/ujDWwIXm2LzVPvvrGvzzCTMsyXa+mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/generator": "^7.18.13", + "@babel/template": "^7.18.10", + "@babel/types": "^7.18.13", + "@graphql-codegen/client-preset": "^5.2.0", + "@graphql-codegen/core": "^5.0.0", + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-tools/apollo-engine-loader": "^8.0.0", + "@graphql-tools/code-file-loader": "^8.0.0", + "@graphql-tools/git-loader": "^8.0.0", + "@graphql-tools/github-loader": "^9.0.0", + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.1.0", + "@graphql-tools/url-loader": "^9.0.0", + "@graphql-tools/utils": "^10.0.0", + "@inquirer/prompts": "^7.8.2", + "@whatwg-node/fetch": "^0.10.0", + "chalk": "^4.1.0", + "cosmiconfig": "^9.0.0", + "debounce": "^2.0.0", + "detect-indent": "^6.0.0", + "graphql-config": "^5.1.1", + "is-glob": "^4.0.1", + "jiti": "^2.3.0", + "json-to-pretty-yaml": "^1.2.2", + "listr2": "^9.0.0", + "log-symbols": "^4.0.0", + "micromatch": "^4.0.5", + 
"shell-quote": "^1.7.3", + "string-env-interpolation": "^1.0.1", + "ts-log": "^2.2.3", + "tslib": "^2.4.0", + "yaml": "^2.3.1", + "yargs": "^17.0.0" + }, + "bin": { + "gql-gen": "cjs/bin.js", + "graphql-code-generator": "cjs/bin.js", + "graphql-codegen": "cjs/bin.js", + "graphql-codegen-esm": "esm/bin.js" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "@parcel/watcher": "^2.1.0", + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "peerDependenciesMeta": { + "@parcel/watcher": { + "optional": true + } + } + }, + "node_modules/@graphql-codegen/client-preset": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/client-preset/-/client-preset-5.2.2.tgz", + "integrity": "sha512-1xufIJZr04ylx0Dnw49m8Jrx1s1kujUNVm+Tp5cPRsQmgPN9VjB7wWY7CGD8ArStv6Vjb0a31Xnm5I+VzZM+Rw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/template": "^7.20.7", + "@graphql-codegen/add": "^6.0.0", + "@graphql-codegen/gql-tag-operations": "5.1.2", + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-codegen/typed-document-node": "^6.1.5", + "@graphql-codegen/typescript": "^5.0.7", + "@graphql-codegen/typescript-operations": "^5.0.7", + "@graphql-codegen/visitor-plugin-common": "^6.2.2", + "@graphql-tools/documents": "^1.0.0", + "@graphql-tools/utils": "^10.0.0", + "@graphql-typed-document-node/core": "3.2.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0", + "graphql-sock": "^1.0.0" + }, + "peerDependenciesMeta": { + "graphql-sock": { + "optional": true + } + } + }, + "node_modules/@graphql-codegen/client-preset/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": 
"sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/core": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-5.0.0.tgz", + "integrity": "sha512-vLTEW0m8LbE4xgRwbFwCdYxVkJ1dBlVJbQyLb9Q7bHnVFgHAP982Xo8Uv7FuPBmON+2IbTjkCqhFLHVZbqpvjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.0.0", + "@graphql-tools/schema": "^10.0.0", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/core/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/gql-tag-operations": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/gql-tag-operations/-/gql-tag-operations-5.1.2.tgz", + "integrity": "sha512-BIv66VJ2bKlpfXBeVakJxihBSKnBIdGFLMaFdnGPxqYlKIzaGffjsGbhViPwwBinmBChW4Se6PU4Py7eysYEiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-codegen/visitor-plugin-common": "6.2.2", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/gql-tag-operations/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": 
"sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/plugin-helpers": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-6.1.0.tgz", + "integrity": "sha512-JJypehWTcty9kxKiqH7TQOetkGdOYjY78RHlI+23qB59cV2wxjFFVf8l7kmuXS4cpGVUNfIjFhVr7A1W7JMtdA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.0.0", + "change-case-all": "1.0.15", + "common-tags": "1.8.2", + "import-from": "4.0.0", + "lodash": "~4.17.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/plugin-helpers/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/schema-ast": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-5.0.0.tgz", + "integrity": "sha512-jn7Q3PKQc0FxXjbpo9trxzlz/GSFQWxL042l0iC8iSbM/Ar+M7uyBwMtXPsev/3Razk+osQyreghIz0d2+6F7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.0.0", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/schema-ast/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": 
"sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/typed-document-node": { + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typed-document-node/-/typed-document-node-6.1.5.tgz", + "integrity": "sha512-6dgEPz+YRMzSPpATj7tsKh/L6Y8OZImiyXIUzvSq/dRAEgoinahrES5y/eZQyc7CVxfoFCyHF9KMQQ9jiLn7lw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-codegen/visitor-plugin-common": "6.2.2", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typed-document-node/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/typescript": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-5.0.7.tgz", + "integrity": "sha512-kZwcu9Iat5RWXxLGPnDbG6qVbGTigF25/aGqCG/DCQ1Al8RufSjVXhIOkJBp7QWAqXn3AupHXL1WTMXP7xs4dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-codegen/schema-ast": "^5.0.0", + "@graphql-codegen/visitor-plugin-common": "6.2.2", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/@graphql-codegen/typescript-msw/-/typescript-msw-3.0.1.tgz", + "integrity": "sha512-SSJxFHDDNpd+EO/6qqRs3k/ZMTgbAisoB/RB8oQ9yt8f7VcuKkpefCEuKIuKTl4XFqPPnTXUEToh7Xs1/w6VAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^3.0.0", + "@graphql-codegen/visitor-plugin-common": "2.13.8", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0", + "msw": "^2.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@ardatan/relay-compiler": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@ardatan/relay-compiler/-/relay-compiler-12.0.0.tgz", + "integrity": "sha512-9anThAaj1dQr6IGmzBMcfzOQKTa5artjuPmw8NYK/fiGEMjADbSguBY2FMDykt+QhilR3wc9VA/3yVju7JHg7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.14.0", + "@babel/generator": "^7.14.0", + "@babel/parser": "^7.14.0", + "@babel/runtime": "^7.0.0", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.0.0", + "babel-preset-fbjs": "^3.4.0", + "chalk": "^4.0.0", + "fb-watchman": "^2.0.0", + "fbjs": "^3.0.0", + "glob": "^7.1.1", + "immutable": "~3.7.6", + "invariant": "^2.2.4", + "nullthrows": "^1.1.1", + "relay-runtime": "12.0.0", + "signedsource": "^1.0.0", + "yargs": "^15.3.1" + }, + "bin": { + "relay-compiler": "bin/relay-compiler" + }, + "peerDependencies": { + "graphql": "*" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-codegen/plugin-helpers": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-3.1.2.tgz", + "integrity": "sha512-emOQiHyIliVOIjKVKdsI5MXj312zmRDwmHpyUTZMjfpvxq/UVAHUJIVdVf+lnjjrI+LXBTgMlTWTgHQfmICxjg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@graphql-tools/utils": "^9.0.0", + "change-case-all": "1.0.15", + "common-tags": "1.8.2", + "import-from": "4.0.0", + "lodash": "~4.17.0", + "tslib": "~2.4.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-codegen/plugin-helpers/node_modules/tslib": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", + "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-codegen/visitor-plugin-common": { + "version": "2.13.8", + "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-2.13.8.tgz", + "integrity": "sha512-IQWu99YV4wt8hGxIbBQPtqRuaWZhkQRG2IZKbMoSvh0vGeWb3dB0n0hSgKaOOxDY+tljtOf9MTcUYvJslQucMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^3.1.2", + "@graphql-tools/optimize": "^1.3.0", + "@graphql-tools/relay-operation-optimizer": "^6.5.0", + "@graphql-tools/utils": "^9.0.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "dependency-graph": "^0.11.0", + "graphql-tag": "^2.11.0", + "parse-filepath": "^1.0.2", + "tslib": "~2.4.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-codegen/visitor-plugin-common/node_modules/tslib": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", + "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==", + "dev": true, + "license": "0BSD" + }, + 
"node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-tools/optimize": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-1.4.0.tgz", + "integrity": "sha512-dJs/2XvZp+wgHH8T5J2TqptT9/6uVzIYvA6uFACha+ufvdMBedkfR4b4GbT8jAKLRARiqRTxy3dctnwkTM2tdw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-tools/relay-operation-optimizer": { + "version": "6.5.18", + "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-6.5.18.tgz", + "integrity": "sha512-mc5VPyTeV+LwiM+DNvoDQfPqwQYhPV/cl5jOBjTgSniyaq8/86aODfMkrE2OduhQ5E00hqrkuL2Fdrgk0w1QJg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ardatan/relay-compiler": "12.0.0", + "@graphql-tools/utils": "^9.2.1", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/@graphql-tools/utils": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-9.2.1.tgz", + "integrity": "sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-typed-document-node/core": "^3.1.1", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", 
+ "wrap-ansi": "^6.2.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/dependency-graph": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-0.11.0.tgz", + "integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": 
"sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", 
+ "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@graphql-codegen/typescript-msw/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@graphql-codegen/typescript-operations": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript-operations/-/typescript-operations-5.0.7.tgz", + "integrity": "sha512-5N3myNse1putRQlp8+l1k9ayvc98oq2mPJx0zN8MTOlTBxcb2grVPFRLy5wJJjuv9NffpyCkVJ9LvUaf8mqQgg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-codegen/typescript": "^5.0.7", + "@graphql-codegen/visitor-plugin-common": "6.2.2", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0", + "graphql-sock": "^1.0.0" + }, + "peerDependenciesMeta": { + "graphql-sock": { + "optional": true + } + } + }, + "node_modules/@graphql-codegen/typescript-operations/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/typescript/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": 
"sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-codegen/visitor-plugin-common": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-6.2.2.tgz", + "integrity": "sha512-wEJ4zJj58PKlXISItZfr0xIHyM1lAuRfoflPegsb1L17Mx5+YzNOy0WAlLele3yzyV89WvCiprFKMcVQ7KfDXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-codegen/plugin-helpers": "^6.1.0", + "@graphql-tools/optimize": "^2.0.0", + "@graphql-tools/relay-operation-optimizer": "^7.0.0", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "dependency-graph": "^1.0.0", + "graphql-tag": "^2.11.0", + "parse-filepath": "^1.0.2", + "tslib": "~2.6.0" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/visitor-plugin-common/node_modules/tslib": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", + "dev": true, + "license": "0BSD" + }, + "node_modules/@graphql-hive/signal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@graphql-hive/signal/-/signal-2.0.0.tgz", + "integrity": "sha512-Pz8wB3K0iU6ae9S1fWfsmJX24CcGeTo6hE7T44ucmV/ALKRj+bxClmqrYcDT7v3f0d12Rh4FAXBb6gon+WkDpQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader": { + "version": "8.0.27", + "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.27.tgz", + "integrity": 
"sha512-XT4BvqmRXkVaT8GgNb9/pr8u4M4vTcvGuI2GlvK+albrJNIV8VxTpsdVYma3kw+VtSIYrxEvLixlfDA/KdmDpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.11.0", + "@whatwg-node/fetch": "^0.10.13", + "sync-fetch": "0.6.0-2", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/batch-execute": { + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-10.0.4.tgz", + "integrity": "sha512-t8E0ILelbaIju0aNujMkKetUmbv3/07nxGSv0kEGLBk9GNtEmQ/Bjj8ZTo2WN35/Fy70zCHz2F/48Nx/Ec48cA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.10.3", + "@whatwg-node/promise-helpers": "^1.3.2", + "dataloader": "^2.2.3", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/code-file-loader": { + "version": "8.1.27", + "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-8.1.27.tgz", + "integrity": "sha512-q3GDbm+7m3DiAnqxa+lYMgYZd49+ez6iGFfXHmzP6qAnf5WlBxRNKNjNVuxOgoV30DCr+vOJfoXeU7VN1qqGWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/graphql-tag-pluck": "8.3.26", + "@graphql-tools/utils": "^10.11.0", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/delegate": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-12.0.2.tgz", + "integrity": "sha512-1X93onxNgOzRvnZ8Xulwi6gNuBeuDxvGYOjUHEZyesPCsaWsyiVj1Wk6Pw/DTPGLy70sOFUKQGcaZbWnDORM2w==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@graphql-tools/batch-execute": "^10.0.4", + "@graphql-tools/executor": "^1.4.13", + "@graphql-tools/schema": "^10.0.29", + "@graphql-tools/utils": "^10.10.3", + "@repeaterjs/repeater": "^3.0.6", + "@whatwg-node/promise-helpers": "^1.3.2", + "dataloader": "^2.2.3", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/documents": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/documents/-/documents-1.0.1.tgz", + "integrity": "sha512-aweoMH15wNJ8g7b2r4C4WRuJxZ0ca8HtNO54rkye/3duxTkW4fGBEutCx03jCIr5+a1l+4vFJNP859QnAVBVCA==", + "dev": true, + "license": "MIT", + "dependencies": { + "lodash.sortby": "^4.7.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-1.5.0.tgz", + "integrity": "sha512-3HzAxfexmynEWwRB56t/BT+xYKEYLGPvJudR1jfs+XZX8bpfqujEhqVFoxmkpEE8BbFcKuBNoQyGkTi1eFJ+hA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.11.0", + "@graphql-typed-document-node/core": "^3.2.0", + "@repeaterjs/repeater": "^3.0.4", + "@whatwg-node/disposablestack": "^0.0.6", + "@whatwg-node/promise-helpers": "^1.0.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-common": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-common/-/executor-common-1.0.5.tgz", + "integrity": "sha512-gsBRxP4ui8s7/ppKGCJUQ9xxTNoFpNYmEirgM52EHo74hL5hrpS5o4zOmBH33+9t2ZasBziIfupYtLNa0DgK0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@envelop/core": "^5.4.0", + 
"@graphql-tools/utils": "^10.10.3" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-graphql-ws": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-3.1.3.tgz", + "integrity": "sha512-q4k8KLoH2U51XdWJRdiW/KIKbBOtJ1mcILv0ALvBkOF99C3vwGj2zr4U0AMGCD3HzML2mPZuajhfYo/xB/pnZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/executor-common": "^1.0.5", + "@graphql-tools/utils": "^10.10.3", + "@whatwg-node/disposablestack": "^0.0.6", + "graphql-ws": "^6.0.6", + "isows": "^1.0.7", + "tslib": "^2.8.1", + "ws": "^8.18.3" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-http": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-3.0.7.tgz", + "integrity": "sha512-sHjtiUZmRtkjhpSzMhxT2ywAGzHjuB1rHsiaSLAq8U5BQg5WoLakKYD7BajgVHwNbfWEc+NnFiJI7ldyhiciiQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-hive/signal": "^2.0.0", + "@graphql-tools/executor-common": "^1.0.5", + "@graphql-tools/utils": "^10.10.3", + "@repeaterjs/repeater": "^3.0.4", + "@whatwg-node/disposablestack": "^0.0.6", + "@whatwg-node/fetch": "^0.10.13", + "@whatwg-node/promise-helpers": "^1.3.2", + "meros": "^1.3.2", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-legacy-ws": { + "version": "1.1.24", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.1.24.tgz", + "integrity": "sha512-wfSpOJCxeBcwVXy3JS4TB4oLwVICuVKPlPQhcAjTRPWYwKerE0HosgUzxCX1fEQ4l1B1OMgKWRglGpoXExKqsQ==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@graphql-tools/utils": "^10.11.0", + "@types/ws": "^8.0.0", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.4.0", + "ws": "^8.17.1" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/git-loader": { + "version": "8.0.31", + "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-8.0.31.tgz", + "integrity": "sha512-xVHM1JecjpU2P0aOj/IaIUc3w6It8sWOdrJElWFZdY9yfWRqXFYwfemtsn/JOrJDIJXYeGpJ304OeqJD5vFIEw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/graphql-tag-pluck": "8.3.26", + "@graphql-tools/utils": "^10.11.0", + "is-glob": "4.0.3", + "micromatch": "^4.0.8", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/github-loader": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-9.0.5.tgz", + "integrity": "sha512-89FRDQGMlzL3607BCQtJhKEiQaZtTmdAnyC5Hmi9giTQXVzEXBbMEZOU0qILxj64cr+smNBx5XqxQ1xn0uZeEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/executor-http": "^3.0.6", + "@graphql-tools/graphql-tag-pluck": "^8.3.26", + "@graphql-tools/utils": "^10.11.0", + "@whatwg-node/fetch": "^0.10.13", + "@whatwg-node/promise-helpers": "^1.0.0", + "sync-fetch": "0.6.0-2", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/graphql-file-loader": { + "version": "8.1.8", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.1.8.tgz", + "integrity": "sha512-dZi9Cw+NWEzJAqzIUON9qjZfjebjcoT4H6jqLkEoAv6kRtTq52m4BLXgFWjMHU7PNLE9OOHB9St7UeZQL+GYrw==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@graphql-tools/import": "7.1.8", + "@graphql-tools/utils": "^10.11.0", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/graphql-tag-pluck": { + "version": "8.3.26", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.3.26.tgz", + "integrity": "sha512-hLsX++KA3YR/PnNJGBq1weSAY8XUUAQFfOSHanLHA2qs5lcNgU6KWbiLiRsJ/B/ZNi2ZO687dhzeZ4h4Yt0V6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/plugin-syntax-import-assertions": "^7.26.0", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", + "@graphql-tools/utils": "^10.11.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/import": { + "version": "7.1.8", + "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-7.1.8.tgz", + "integrity": "sha512-aUKHMbaeHhCkS867mNCk9sJuvd9xE3Ocr+alwdvILkDxHf7Xaumx4mK8tN9FAXeKhQWGGD5QpkIBnUzt2xoX/A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.11.0", + "@theguild/federation-composition": "^0.21.0", + "resolve-from": "5.0.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/json-file-loader": { + "version": "8.0.25", + "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-8.0.25.tgz", + "integrity": "sha512-Dnr9z818Kdn3rfoZO/+/ZQUqWavjV7AhEp4edV1mGsX+J1HFkNC3WMl6MD3W0hth2HWLQpCFJDdOPnchxnFNfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": 
"^10.11.0", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/load": { + "version": "8.1.7", + "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-8.1.7.tgz", + "integrity": "sha512-RxrHOC4vVI50+Q1mwgpmTVCB/UDDYVEGD/g/hP3tT2BW9F3rJ7Z3Lmt/nGfPQuWPao3w6vgJ9oSAWtism7CU5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/schema": "^10.0.30", + "@graphql-tools/utils": "^10.11.0", + "p-limit": "3.1.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/merge": { + "version": "9.1.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-9.1.6.tgz", + "integrity": "sha512-bTnP+4oom4nDjmkS3Ykbe+ljAp/RIiWP3R35COMmuucS24iQxGLa9Hn8VMkLIoaoPxgz6xk+dbC43jtkNsFoBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.11.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/optimize": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-2.0.0.tgz", + "integrity": "sha512-nhdT+CRGDZ+bk68ic+Jw1OZ99YCDIKYA5AlVAnBHJvMawSx9YQqQAIj4refNc1/LRieGiuWvhbG3jvPVYho0Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/relay-operation-optimizer": { + "version": "7.0.26", + "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-7.0.26.tgz", + "integrity": 
"sha512-cVdS2Hw4hg/WgPVV2wRIzZM975pW5k4vdih3hR4SvEDQVr6MmozmlTQSqzMyi9yg8LKTq540Oz3bYQa286yGmg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ardatan/relay-compiler": "^12.0.3", + "@graphql-tools/utils": "^10.11.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/schema": { + "version": "10.0.30", + "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-10.0.30.tgz", + "integrity": "sha512-yPXU17uM/LR90t92yYQqn9mAJNOVZJc0nQtYeZyZeQZeQjwIGlTubvvoDL0fFVk+wZzs4YQOgds2NwSA4npodA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/merge": "^9.1.6", + "@graphql-tools/utils": "^10.11.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/url-loader": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-9.0.5.tgz", + "integrity": "sha512-EPNhZBBL48TudLdyenOw1wV9dI7vsinWLLxSTtkx4zUQxmU+p/LxMyf7MUwjmp3yFZhR/9XchsTZX6uvOyXWqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/executor-graphql-ws": "^3.1.2", + "@graphql-tools/executor-http": "^3.0.6", + "@graphql-tools/executor-legacy-ws": "^1.1.24", + "@graphql-tools/utils": "^10.11.0", + "@graphql-tools/wrap": "^11.0.0", + "@types/ws": "^8.0.0", + "@whatwg-node/fetch": "^0.10.13", + "@whatwg-node/promise-helpers": "^1.0.0", + "isomorphic-ws": "^5.0.0", + "sync-fetch": "0.6.0-2", + "tslib": "^2.4.0", + "ws": "^8.17.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/utils": { + "version": "10.11.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-10.11.0.tgz", + "integrity": 
"sha512-iBFR9GXIs0gCD+yc3hoNswViL1O5josI33dUqiNStFI/MHLCEPduasceAcazRH77YONKNiviHBV8f7OgcT4o2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-typed-document-node/core": "^3.1.1", + "@whatwg-node/promise-helpers": "^1.0.0", + "cross-inspect": "1.0.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/wrap": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-11.1.2.tgz", + "integrity": "sha512-TcKZzUzJNmuyMBQ1oMdnxhBUUacN/5VEJu0/1KVce2aIzCwTTaN9JTU3MgjO7l5Ixn4QLkc6XbxYNv0cHDQgtQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/delegate": "^12.0.2", + "@graphql-tools/schema": "^10.0.29", + "@graphql-tools/utils": "^10.10.3", + "@whatwg-node/promise-helpers": "^1.3.2", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-typed-document-node/core": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz", + "integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@inquirer/ansi": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.2.tgz", + "integrity": "sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/checkbox": { + "version": "4.3.2", + "resolved": 
"https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.3.2.tgz", + "integrity": "sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^1.0.2", + "@inquirer/core": "^10.3.2", + "@inquirer/figures": "^1.0.15", + "@inquirer/type": "^3.0.10", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/confirm": { + "version": "5.1.21", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.21.tgz", + "integrity": "sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/core": { + "version": "10.3.2", + "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.3.2.tgz", + "integrity": "sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^1.0.2", + "@inquirer/figures": "^1.0.15", + "@inquirer/type": "^3.0.10", + "cli-width": "^4.1.0", + "mute-stream": "^2.0.0", + "signal-exit": "^4.1.0", + "wrap-ansi": "^6.2.0", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/editor": { + "version": "4.2.23", + "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.23.tgz", + "integrity": 
"sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/external-editor": "^1.0.3", + "@inquirer/type": "^3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/expand": { + "version": "4.0.23", + "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.23.tgz", + "integrity": "sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/external-editor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.3.tgz", + "integrity": "sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^2.1.1", + "iconv-lite": "^0.7.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/figures": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.15.tgz", + "integrity": "sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/input": { + "version": "4.3.1", + "resolved": 
"https://registry.npmjs.org/@inquirer/input/-/input-4.3.1.tgz", + "integrity": "sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/number": { + "version": "3.0.23", + "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.23.tgz", + "integrity": "sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/password": { + "version": "4.0.23", + "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.23.tgz", + "integrity": "sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^1.0.2", + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/prompts": { + "version": "7.10.1", + "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.10.1.tgz", + "integrity": "sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/checkbox": "^4.3.2", + "@inquirer/confirm": "^5.1.21", + "@inquirer/editor": 
"^4.2.23", + "@inquirer/expand": "^4.0.23", + "@inquirer/input": "^4.3.1", + "@inquirer/number": "^3.0.23", + "@inquirer/password": "^4.0.23", + "@inquirer/rawlist": "^4.1.11", + "@inquirer/search": "^3.2.2", + "@inquirer/select": "^4.4.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/rawlist": { + "version": "4.1.11", + "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.1.11.tgz", + "integrity": "sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/type": "^3.0.10", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/search": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.2.2.tgz", + "integrity": "sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^10.3.2", + "@inquirer/figures": "^1.0.15", + "@inquirer/type": "^3.0.10", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/select": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.4.2.tgz", + "integrity": "sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^1.0.2", + "@inquirer/core": "^10.3.2", + "@inquirer/figures": 
"^1.0.15", + "@inquirer/type": "^3.0.10", + "yoctocolors-cjs": "^2.1.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@inquirer/type": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.10.tgz", + "integrity": "sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.1.tgz", + "integrity": "sha512-WMz71T1JS624nWj2n2fnYAuPovhv7EUhk69R6i9dsVyzxt5eM3bjwvgk9L+APE1TRscGysAVMANkB0jh0LQZrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/cliui": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-9.0.0.tgz", + "integrity": "sha512-AokJm4tuBHillT+FpMtxQ60n8ObyXBatq7jD2/JA9dxbDDokKQm8KMht5ibGzLVU9IJDIKK4TPKgMHEYMn3lMg==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/@joshwooding/vite-plugin-react-docgen-typescript": { + "version": "0.6.4", + "resolved": 
"https://registry.npmjs.org/@joshwooding/vite-plugin-react-docgen-typescript/-/vite-plugin-react-docgen-typescript-0.6.4.tgz", + "integrity": "sha512-6PyZBYKnnVNqOSB0YFly+62R7dmov8segT27A+RVTBVd4iAE6kbW9QBJGlyR2yG4D4ohzhZSTIu7BK1UTtmFFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "glob": "^13.0.1", + "react-docgen-typescript": "^2.2.2" + }, + "peerDependencies": { + "typescript": ">= 4.3.x", + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": 
"sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@mdx-js/react": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.1.1.tgz", + "integrity": "sha512-f++rKLQgUVYDAtECQ6fn/is15GkEH9+nZPM3MS0RcxVqoTfawHvDlSCH7JbMhAM6uJ32v3eXLvLmLvjGu7PTQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mdx": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=16", + "react": ">=16" + } + }, + "node_modules/@mswjs/interceptors": { + "version": "0.41.0", + "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.41.0.tgz", + "integrity": "sha512-edAo9bW53BLYeSK+UPRr2Iz1Fj9DeGMjytvVM0HXRoo750ElWUgPsZPAOTQa12EUiwgDErH2PsFNTLvk1jBxjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@open-draft/deferred-promise": "^2.2.0", + "@open-draft/logger": "^0.3.0", + "@open-draft/until": "^2.0.0", + "is-node-process": "^1.2.0", + "outvariant": "^1.4.3", + "strict-event-emitter": "^0.5.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@napi-rs/wasm-runtime": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.0.tgz", + "integrity": "sha512-Fq6DJW+Bb5jaWE69/qOE0D1TUN9+6uWhCeZpdnSBk14pjLcCWR7Q8n49PTSPHazM37JqrsdpEthXy2xn6jWWiA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": 
"^1.7.1", + "@emnapi/runtime": "^1.7.1", + "@tybys/wasm-util": "^0.10.1" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@open-draft/deferred-promise": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz", + "integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@open-draft/logger": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz", + "integrity": "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-node-process": "^1.2.0", + "outvariant": "^1.4.0" + } + }, + "node_modules/@open-draft/until": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz", + "integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@oxc-resolver/binding-android-arm-eabi": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.15.0.tgz", + "integrity": "sha512-Q+lWuFfq7whNelNJIP1dhXaVz4zO9Tu77GcQHyxDWh3MaCoO2Bisphgzmsh4ZoUe2zIchQh6OvQL99GlWHg9Tw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@oxc-resolver/binding-android-arm64": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.15.0.tgz", + "integrity": "sha512-vbdBttesHR0W1oJaxgWVTboyMUuu+VnPsHXJ6jrXf4czELzB6GIg5DrmlyhAmFBhjwov+yJH/DfTnHS+2sDgOw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@oxc-resolver/binding-darwin-arm64": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.15.0.tgz", + "integrity": "sha512-R67lsOe1UzNjqVBCwCZX1rlItTsj/cVtBw4Uy19CvTicqEWvwaTn8t34zLD75LQwDDPCY3C8n7NbD+LIdw+ZoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oxc-resolver/binding-darwin-x64": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.15.0.tgz", + "integrity": "sha512-77mya5F8WV0EtCxI0MlVZcqkYlaQpfNwl/tZlfg4jRsoLpFbaTeWv75hFm6TE84WULVlJtSgvf7DhoWBxp9+ZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@oxc-resolver/binding-freebsd-x64": { + "version": "11.15.0", 
+ "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.15.0.tgz", + "integrity": "sha512-X1Sz7m5PC+6D3KWIDXMUtux+0Imj6HfHGdBStSvgdI60OravzI1t83eyn6eN0LPTrynuPrUgjk7tOnOsBzSWHw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm-gnueabihf": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.15.0.tgz", + "integrity": "sha512-L1x/wCaIRre+18I4cH/lTqSAymlV0k4HqfSYNNuI9oeL28Ks86lI6O5VfYL6sxxWYgjuWB98gNGo7tq7d4GarQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm-musleabihf": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.15.0.tgz", + "integrity": "sha512-abGXd/zMGa0tH8nKlAXdOnRy4G7jZmkU0J85kMKWns161bxIgGn/j7zxqh3DKEW98wAzzU9GofZMJ0P5YCVPVw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm64-gnu": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.15.0.tgz", + "integrity": "sha512-SVjjjtMW66Mza76PBGJLqB0KKyFTBnxmtDXLJPbL6ZPGSctcXVmujz7/WAc0rb9m2oV0cHQTtVjnq6orQnI/jg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-arm64-musl": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.15.0.tgz", + "integrity": "sha512-JDv2/AycPF2qgzEiDeMJCcSzKNDm3KxNg0KKWipoKEMDFqfM7LxNwwSVyAOGmrYlE4l3dg290hOMsr9xG7jv9g==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-ppc64-gnu": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.15.0.tgz", + "integrity": "sha512-zbu9FhvBLW4KJxo7ElFvZWbSt4vP685Qc/Gyk/Ns3g2gR9qh2qWXouH8PWySy+Ko/qJ42+HJCLg+ZNcxikERfg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-riscv64-gnu": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.15.0.tgz", + "integrity": "sha512-Kfleehe6B09C2qCnyIU01xLFqFXCHI4ylzkicfX/89j+gNHh9xyNdpEvit88Kq6i5tTGdavVnM6DQfOE2qNtlg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-riscv64-musl": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.15.0.tgz", + "integrity": "sha512-J7LPiEt27Tpm8P+qURDwNc8q45+n+mWgyys4/V6r5A8v5gDentHRGUx3iVk5NxdKhgoGulrzQocPTZVosq25Eg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-s390x-gnu": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.15.0.tgz", + "integrity": "sha512-+8/d2tAScPjVJNyqa7GPGnqleTB/XW9dZJQ2D/oIM3wpH3TG+DaFEXBbk4QFJ9K9AUGBhvQvWU2mQyhK/yYn3Q==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-x64-gnu": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.15.0.tgz", + 
"integrity": "sha512-xtvSzH7Nr5MCZI2FKImmOdTl9kzuQ51RPyLh451tvD2qnkg3BaqI9Ox78bTk57YJhlXPuxWSOL5aZhKAc9J6qg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-linux-x64-musl": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.15.0.tgz", + "integrity": "sha512-14YL1zuXj06+/tqsuUZuzL0T425WA/I4nSVN1kBXeC5WHxem6lQ+2HGvG+crjeJEqHgZUT62YIgj88W+8E7eyg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@oxc-resolver/binding-openharmony-arm64": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-openharmony-arm64/-/binding-openharmony-arm64-11.15.0.tgz", + "integrity": "sha512-/7Qli+1Wk93coxnrQaU8ySlICYN8HsgyIrzqjgIkQEpI//9eUeaeIHZptNl2fMvBGeXa7k2QgLbRNaBRgpnvMw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@oxc-resolver/binding-wasm32-wasi": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.15.0.tgz", + "integrity": "sha512-q5rn2eIMQLuc/AVGR2rQKb2EVlgreATGG8xXg8f4XbbYCVgpxaq+dgMbiPStyNywW1MH8VU2T09UEm30UtOQvg==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@oxc-resolver/binding-win32-arm64-msvc": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.15.0.tgz", + "integrity": "sha512-yCAh2RWjU/8wWTxQDgGPgzV9QBv0/Ojb5ej1c/58iOjyTuy/J1ZQtYi2SpULjKmwIxLJdTiCHpMilauWimE31w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, 
+ "node_modules/@oxc-resolver/binding-win32-ia32-msvc": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.15.0.tgz", + "integrity": "sha512-lmXKb6lvA6M6QIbtYfgjd+AryJqExZVSY2bfECC18OPu7Lv1mHFF171Mai5l9hG3r4IhHPPIwT10EHoilSCYeA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@oxc-resolver/binding-win32-x64-msvc": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.15.0.tgz", + "integrity": "sha512-HZsfne0s/tGOcJK9ZdTGxsNU2P/dH0Shf0jqrPvsC6wX0Wk+6AyhSpHFLQCnLOuFQiHHU0ePfM8iYsoJb5hHpQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz", + 
"integrity": "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": 
"^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context-menu": { + "version": "2.2.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context-menu/-/react-context-menu-2.2.16.tgz", + "integrity": "sha512-O8morBEW+HsVG28gYDZPTrT9UUovQUlJue5YO836tiTJhuIWBm/zQHc7j388sHWtdH/xUZurK9olD2+pcqx5ww==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + 
"@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || 
^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", + "integrity": "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + 
"@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-form": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-form/-/react-form-0.1.8.tgz", + "integrity": "sha512-QM70k4Zwjttifr5a4sZFts9fn8FzHYvQ5PiB19O2HsYibaHSVt9fH9rzB0XZo/YcM+b7t/p7lYCT/F5eOeF5yQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-label": "2.1.7", + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", + "integrity": 
"sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", + "integrity": "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.8.tgz", + "integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-context": "1.1.3", + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.3.tgz", + "integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-slot": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", + "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": 
"sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.8.tgz", + "integrity": "sha512-sDvqVY4itsKwwSMEe0jtKgfTh+72Sy3gPmQpjqcQneqQ4PFmr/1I0YA+2/puilhggCe2gJcx5EBAYFkWkdpa5g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + 
"@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator/node_modules/@radix-ui/react-slot": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", + "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": 
"1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": 
"sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "license": "MIT", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", + "license": "MIT" + }, + "node_modules/@repeaterjs/repeater": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@repeaterjs/repeater/-/repeater-3.0.6.tgz", + "integrity": "sha512-Javneu5lsuhwNCryN+pXH93VPQ8g0dBX7wItHFgYiwQmzE1sVdg5tWHiOgHywzL2W21XQopa7IwIEnNbmeUJYA==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.3.tgz", + "integrity": "sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.5.tgz", + "integrity": "sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.5.tgz", + "integrity": "sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.5.tgz", + "integrity": 
"sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.5.tgz", + "integrity": "sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.5.tgz", + "integrity": "sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.5.tgz", + "integrity": "sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.5.tgz", + "integrity": "sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.53.5", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.5.tgz", + "integrity": "sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.5.tgz", + "integrity": "sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.5.tgz", + "integrity": "sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.5.tgz", + "integrity": "sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.5.tgz", + "integrity": "sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.5.tgz", + "integrity": "sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.5.tgz", + "integrity": "sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.5.tgz", + "integrity": "sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.5.tgz", + "integrity": "sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.5.tgz", + "integrity": "sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", 
+ "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.5.tgz", + "integrity": "sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.5.tgz", + "integrity": "sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.5.tgz", + "integrity": "sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.5.tgz", + "integrity": "sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.5.tgz", + "integrity": "sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==", + "cpu": [ + 
"x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@scarf/scarf": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz", + "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==", + "hasInstallScript": true, + "license": "Apache-2.0" + }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/addon-docs": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-10.2.9.tgz", + "integrity": "sha512-rPUSyymt6IDI8PbZVEXPnqOysstp+mKeZiPRxcKIklecDhLUiy5Yc2PQneYY//Gp7Z/CEe54tJHr+WpFgtLPYg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@mdx-js/react": "^3.0.0", + "@storybook/csf-plugin": "10.2.9", + "@storybook/icons": "^2.0.1", + "@storybook/react-dom-shim": "10.2.9", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 
|| ^18.0.0 || ^19.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^10.2.9" + } + }, + "node_modules/@storybook/builder-vite": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/builder-vite/-/builder-vite-10.2.9.tgz", + "integrity": "sha512-01DvThkchYqHh2GzqFTNrrNsrn3URuHXfHlDt2u+ggqiBKjObxeRhlgZN0ntG9w41Y05mhWH9pRAKbPMGDBIwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/csf-plugin": "10.2.9", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^10.2.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@storybook/csf-plugin": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-10.2.9.tgz", + "integrity": "sha512-vfGZeszuDZc742eQgpA/W6ok54ePYPYz9MLdM6u3XBHJXmNhsjbcwSFTXZHrxjyDn88vXdo+Frg3HBKkOQBnbQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "unplugin": "^2.3.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "esbuild": "*", + "rollup": "*", + "storybook": "^10.2.9", + "vite": "*", + "webpack": "*" + }, + "peerDependenciesMeta": { + "esbuild": { + "optional": true + }, + "rollup": { + "optional": true + }, + "vite": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/@storybook/global": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@storybook/global/-/global-5.0.0.tgz", + "integrity": "sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/icons": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@storybook/icons/-/icons-2.0.1.tgz", + 
"integrity": "sha512-/smVjw88yK3CKsiuR71vNgWQ9+NuY2L+e8X7IMrFjexjm6ZR8ULrV2DRkTA61aV6ryefslzHEGDInGpnNeIocg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/@storybook/react": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/react/-/react-10.2.9.tgz", + "integrity": "sha512-cYJroaHWHmauPO8EcfpDTU3Odc7z5/DbuLO+jQ4SAaoupwnE35HNe8WAdpD3jpmI20cqKauqOENIT/+HjxhlYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@storybook/react-dom-shim": "10.2.9", + "react-docgen": "^8.0.2" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "storybook": "^10.2.9", + "typescript": ">= 4.9.x" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@storybook/react-dom-shim": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-10.2.9.tgz", + "integrity": "sha512-hlvFl0ylK/RZ4GxOXcBfzQNoOm3/x0LEgaPYlkR2/P4CTbKKTc+fHRsZMpFgP/X0g8oZmfm93mdhQdX0zlF8JQ==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "storybook": "^10.2.9" + } + }, + "node_modules/@storybook/react-vite": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/@storybook/react-vite/-/react-vite-10.2.9.tgz", + "integrity": "sha512-uDuo8TeBc3E519xQQGGGaH43TaCEbEgNjDV3vX/G6ikWgJoYzFFILO4EKhKKsf7t3dJl+FIXJEcvPw7Azd0VPw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@joshwooding/vite-plugin-react-docgen-typescript": "^0.6.4", + "@rollup/pluginutils": "^5.0.2", + "@storybook/builder-vite": "10.2.9", + "@storybook/react": "10.2.9", + "empathic": "^2.0.0", + "magic-string": "^0.30.0", + "react-docgen": "^8.0.0", + "resolve": "^1.22.8", + "tsconfig-paths": "^4.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "storybook": "^10.2.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@swc/core": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.15.11.tgz", + "integrity": "sha512-iLmLTodbYxU39HhMPaMUooPwO/zqJWvsqkrXv1ZI38rMb048p6N7qtAtTp37sw9NzSrvH6oli8EdDygo09IZ/w==", + "dev": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3", + "@swc/types": "^0.1.25" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/swc" + }, + "optionalDependencies": { + "@swc/core-darwin-arm64": "1.15.11", + "@swc/core-darwin-x64": "1.15.11", + "@swc/core-linux-arm-gnueabihf": "1.15.11", + "@swc/core-linux-arm64-gnu": "1.15.11", + "@swc/core-linux-arm64-musl": "1.15.11", + "@swc/core-linux-x64-gnu": "1.15.11", + "@swc/core-linux-x64-musl": "1.15.11", + "@swc/core-win32-arm64-msvc": "1.15.11", + "@swc/core-win32-ia32-msvc": "1.15.11", + "@swc/core-win32-x64-msvc": "1.15.11" + }, + "peerDependencies": { + "@swc/helpers": ">=0.5.17" + }, + "peerDependenciesMeta": { + "@swc/helpers": { + "optional": true + } + } + }, + "node_modules/@swc/core-darwin-arm64": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.11.tgz", + "integrity": "sha512-QoIupRWVH8AF1TgxYyeA5nS18dtqMuxNwchjBIwJo3RdwLEFiJq6onOx9JAxHtuPwUkIVuU2Xbp+jCJ7Vzmgtg==", + 
"cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-darwin-x64": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.11.tgz", + "integrity": "sha512-S52Gu1QtPSfBYDiejlcfp9GlN+NjTZBRRNsz8PNwBgSE626/FUf2PcllVUix7jqkoMC+t0rS8t+2/aSWlMuQtA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.11.tgz", + "integrity": "sha512-lXJs8oXo6Z4yCpimpQ8vPeCjkgoHu5NoMvmJZ8qxDyU99KVdg6KwU9H79vzrmB+HfH+dCZ7JGMqMF//f8Cfvdg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.11.tgz", + "integrity": "sha512-chRsz1K52/vj8Mfq/QOugVphlKPWlMh10V99qfH41hbGvwAU6xSPd681upO4bKiOr9+mRIZZW+EfJqY42ZzRyA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.11.tgz", + "integrity": "sha512-PYftgsTaGnfDK4m6/dty9ryK1FbLk+LosDJ/RJR2nkXGc8rd+WenXIlvHjWULiBVnS1RsjHHOXmTS4nDhe0v0w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + 
"version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.11.tgz", + "integrity": "sha512-DKtnJKIHiZdARyTKiX7zdRjiDS1KihkQWatQiCHMv+zc2sfwb4Glrodx2VLOX4rsa92NLR0Sw8WLcPEMFY1szQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.11.tgz", + "integrity": "sha512-mUjjntHj4+8WBaiDe5UwRNHuEzLjIWBTSGTw0JT9+C9/Yyuh4KQqlcEQ3ro6GkHmBGXBFpGIj/o5VMyRWfVfWw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.11.tgz", + "integrity": "sha512-ZkNNG5zL49YpaFzfl6fskNOSxtcZ5uOYmWBkY4wVAvgbSAQzLRVBp+xArGWh2oXlY/WgL99zQSGTv7RI5E6nzA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.11.tgz", + "integrity": "sha512-6XnzORkZCQzvTQ6cPrU7iaT9+i145oLwnin8JrfsLG41wl26+5cNQ2XV3zcbrnFEV6esjOceom9YO1w9mGJByw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.11.tgz", + "integrity": 
"sha512-IQ2n6af7XKLL6P1gIeZACskSxK8jWtoKpJWLZmdXTDj1MGzktUy4i+FvpdtxFmJWNavRWH1VmTr6kAubRDHeKw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "Apache-2.0 AND MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@swc/types": { + "version": "0.1.25", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz", + "integrity": "sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3" + } + }, + "node_modules/@tanstack/history": { + "version": "1.145.7", + "resolved": "https://registry.npmjs.org/@tanstack/history/-/history-1.145.7.tgz", + "integrity": "sha512-gMo/ReTUp0a3IOcZoI3hH6PLDC2R/5ELQ7P2yu9F6aEkA0wSQh+Q4qzMrtcKvF2ut0oE+16xWCGDo/TdYd6cEQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/query-core": { + "version": "5.90.20", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz", + "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/query-devtools": { + "version": "5.93.0", + "resolved": "https://registry.npmjs.org/@tanstack/query-devtools/-/query-devtools-5.93.0.tgz", + "integrity": "sha512-+kpsx1NQnOFTZsw6HAFCW3HkKg0+2cepGtAWXjiiSOJJ1CtQpt72EE2nyZb+AjAbLRPoeRmPJ8MtQd8r8gsPdg==", + "dev": true, + "license": 
"MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.90.21", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz", + "integrity": "sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.90.20" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@tanstack/react-query-devtools": { + "version": "5.91.3", + "resolved": "https://registry.npmjs.org/@tanstack/react-query-devtools/-/react-query-devtools-5.91.3.tgz", + "integrity": "sha512-nlahjMtd/J1h7IzOOfqeyDh5LNfG0eULwlltPEonYy0QL+nqrBB+nyzJfULV+moL7sZyxc2sHdNJki+vLA9BSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tanstack/query-devtools": "5.93.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "@tanstack/react-query": "^5.90.20", + "react": "^18 || ^19" + } + }, + "node_modules/@tanstack/react-router": { + "version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.150.0.tgz", + "integrity": "sha512-k/oycTCpBT2XoEk9dNd/nNYhF0X9fLSB10lT40+NVX1TjOtBq5whksk8MT6oRnSoQ8KWeb7La3G9kFaAeSULkA==", + "license": "MIT", + "dependencies": { + "@tanstack/history": "1.145.7", + "@tanstack/react-store": "^0.8.0", + "@tanstack/router-core": "1.150.0", + "isbot": "^5.1.22", + "tiny-invariant": "^1.3.3", + "tiny-warning": "^1.0.3" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": ">=18.0.0 || >=19.0.0", + "react-dom": ">=18.0.0 || >=19.0.0" + } + }, + "node_modules/@tanstack/react-router-devtools": { + 
"version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-router-devtools/-/react-router-devtools-1.150.0.tgz", + "integrity": "sha512-TlvTE+XK5XVCfYjazoMWkjyyPKe4kMw2nCA7EuWoYUJKOqRW5oKvBY7auViGWxp51FKDEjV3bbok3wPKBYwZww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tanstack/router-devtools-core": "1.150.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "@tanstack/react-router": "^1.150.0", + "@tanstack/router-core": "^1.150.0", + "react": ">=18.0.0 || >=19.0.0", + "react-dom": ">=18.0.0 || >=19.0.0" + }, + "peerDependenciesMeta": { + "@tanstack/router-core": { + "optional": true + } + } + }, + "node_modules/@tanstack/react-store": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.8.0.tgz", + "integrity": "sha512-1vG9beLIuB7q69skxK9r5xiLN3ztzIPfSQSs0GfeqWGO2tGIyInZx0x1COhpx97RKaONSoAb8C3dxacWksm1ow==", + "license": "MIT", + "dependencies": { + "@tanstack/store": "0.8.0", + "use-sync-external-store": "^1.6.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/@tanstack/router-core": { + "version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.150.0.tgz", + "integrity": "sha512-cAm44t/tUbfyzaDH+rE/WO4u3AgaZdpJp00xjQ4gNkC2O95ntVHq5fx+4fhtrkKpgdXoKldgk8OK66djiWpuGQ==", + "license": "MIT", + "dependencies": { + "@tanstack/history": "1.145.7", + "@tanstack/store": "^0.8.0", + "cookie-es": "^2.0.0", + "seroval": "^1.4.1", + "seroval-plugins": "^1.4.0", + "tiny-invariant": "^1.3.3", + "tiny-warning": "^1.0.3" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/router-devtools-core": { + "version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/router-devtools-core/-/router-devtools-core-1.150.0.tgz", + "integrity": "sha512-61V+4fq2fOPru/48cuojKvWhQx2h/nuj4nVHwzu9E7O8h391h4Hks6axxRbY98/rIz96mn5TCoc0aYuoga53bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "clsx": "^2.1.1", + "goober": "^2.1.16", + "tiny-invariant": "^1.3.3" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "@tanstack/router-core": "^1.150.0", + "csstype": "^3.0.10" + }, + "peerDependenciesMeta": { + "csstype": { + "optional": true + } + } + }, + "node_modules/@tanstack/router-generator": { + "version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/router-generator/-/router-generator-1.150.0.tgz", + "integrity": "sha512-WsA1bN5/I+cxE6V1DkU5ABIPBQxZLlxszElYgnIhs884tzukv76rYMFOy6Xqd51YIFdYtjDrxZbp4/vfkrVCug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tanstack/router-core": "1.150.0", + "@tanstack/router-utils": "1.143.11", + "@tanstack/virtual-file-routes": "1.145.4", + "prettier": "^3.5.0", + "recast": "^0.23.11", + "source-map": "^0.7.4", + "tsx": "^4.19.2", + "zod": "^3.24.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/router-plugin": { + "version": "1.150.0", + "resolved": "https://registry.npmjs.org/@tanstack/router-plugin/-/router-plugin-1.150.0.tgz", + "integrity": "sha512-k2NLysBXO4Wpt4Oo0xeBhNtFsMwHOU8ud48/cWNWbV89QAjlk0XU5CGNj2JEaFMT0zlF3H/aM5/h0+vYnDjFFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.5", + "@babel/plugin-syntax-jsx": "^7.27.1", + "@babel/plugin-syntax-typescript": "^7.27.1", + "@babel/template": "^7.27.2", + 
"@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5", + "@tanstack/router-core": "1.150.0", + "@tanstack/router-generator": "1.150.0", + "@tanstack/router-utils": "1.143.11", + "@tanstack/virtual-file-routes": "1.145.4", + "babel-dead-code-elimination": "^1.0.11", + "chokidar": "^3.6.0", + "unplugin": "^2.1.2", + "zod": "^3.24.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "@rsbuild/core": ">=1.0.2", + "@tanstack/react-router": "^1.150.0", + "vite": ">=5.0.0 || >=6.0.0 || >=7.0.0", + "vite-plugin-solid": "^2.11.10", + "webpack": ">=5.92.0" + }, + "peerDependenciesMeta": { + "@rsbuild/core": { + "optional": true + }, + "@tanstack/react-router": { + "optional": true + }, + "vite": { + "optional": true + }, + "vite-plugin-solid": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/@tanstack/router-utils": { + "version": "1.143.11", + "resolved": "https://registry.npmjs.org/@tanstack/router-utils/-/router-utils-1.143.11.tgz", + "integrity": "sha512-N24G4LpfyK8dOlnP8BvNdkuxg1xQljkyl6PcrdiPSA301pOjatRT1y8wuCCJZKVVD8gkd0MpCZ0VEjRMGILOtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.5", + "@babel/generator": "^7.28.5", + "@babel/parser": "^7.28.5", + "ansis": "^4.1.0", + "diff": "^8.0.2", + "pathe": "^2.0.3", + "tinyglobby": "^0.2.15" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/store": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.8.0.tgz", + "integrity": "sha512-Om+BO0YfMZe//X2z0uLF2j+75nQga6TpTJgLJQBiq85aOyZNIhkCgleNcud2KQg4k4v9Y9l+Uhru3qWMPGTOzQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/virtual-file-routes": { 
+ "version": "1.145.4", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-file-routes/-/virtual-file-routes-1.145.4.tgz", + "integrity": "sha512-CI75JrfqSluhdGwLssgVeQBaCphgfkMQpi8MCY3UJX1hoGzXa8kHYJcUuIFMOLs1q7zqHy++EVVtMK03osR5wQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@testing-library/dom": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz", + "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.3.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "picocolors": "1.1.1", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@testing-library/jest-dom": { + "version": "6.9.1", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz", + "integrity": "sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@adobe/css-tools": "^4.4.0", + "aria-query": "^5.0.0", + "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.6.3", + "picocolors": "^1.1.1", + "redent": "^3.0.0" + }, + "engines": { + "node": ">=14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", + "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/@testing-library/react": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.1.tgz", + "integrity": "sha512-gr4KtAWqIOQoucWYD/f6ki+j5chXfcPc74Col/6poTyqTmn7zRmodWahWRCp8tYd+GMqBonw6hstNzqjbs6gjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@testing-library/dom": "^10.0.0", + "@types/react": "^18.0.0 || ^19.0.0", + "@types/react-dom": "^18.0.0 || ^19.0.0", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@testing-library/user-event": { + "version": "14.6.1", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.6.1.tgz", + "integrity": "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "@testing-library/dom": ">=7.21.4" + } + }, + "node_modules/@theguild/federation-composition": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@theguild/federation-composition/-/federation-composition-0.21.1.tgz", + "integrity": "sha512-iw1La4tbRaWKBgz+J9b1ydxv+kgt+7n04ZgD8HSeDJodLsLAxbXj/gLif5f2vyMa98ommBQ73ztBe8zOzGq5YQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "constant-case": "^3.0.4", + "debug": "4.4.3", + "json5": "^2.2.3", + "lodash.sortby": "^4.7.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "graphql": "^16.0.0" + } + }, + "node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dev": 
true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/aria-query": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", + "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/types": "^7.28.2" + } + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/doctrine": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/@types/doctrine/-/doctrine-0.0.9.tgz", + "integrity": "sha512-eOIHzCUSH7SMfonMG1LsC2f8vxBFtho6NGBznK41R84YzPuvSBzrhEps33IsQiOW9+VL6NQ9DbjQJznk/S4uRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mdx": { + "version": "2.0.13", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", + "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "25.2.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.2.3.tgz", + "integrity": "sha512-m0jEgYlYz+mDJZ2+F4v8D1AyQb+QzsNqRuI7xg1VQX/KlKS0qT9r1Mo16yo5F/MtifXFgaofIFsdFMox2SxIbQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/react": { + "version": "19.2.14", + "resolved": 
"https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", + "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.2.3", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz", + "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", + "devOptional": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.2.0" + } + }, + "node_modules/@types/resolve": { + "version": "1.20.6", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.6.tgz", + "integrity": "sha512-A4STmOXPhMUtHH+S6ymgE2GiBSMqf4oTvcQZMcHzokuTLVYzXTB8ttjcgxOVaAp2lGwEdzZ0J+cRbbeevQj1UQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/statuses": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.6.tgz", + "integrity": "sha512-xMAgYwceFhRA2zY+XbEA7mxYbA093wdiW8Vu6gZPGWy9cmOyU9XesH1tNcEWsKFd5Vzrqx5T3D38PWx1FIIXkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/swagger-ui-dist": { + "version": "3.30.6", + "resolved": "https://registry.npmjs.org/@types/swagger-ui-dist/-/swagger-ui-dist-3.30.6.tgz", + "integrity": "sha512-FVxN7wjLYRtJsZBscOcOcf8oR++m38vbUFjT33Mr9HBuasX9bRDrJsp7iwixcOtKSHEEa2B7o2+4wEiXqC+Ebw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/whatwg-mimetype": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/whatwg-mimetype/-/whatwg-mimetype-3.0.2.tgz", + "integrity": "sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": 
"sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@vector-im/compound-design-tokens": { + "version": "6.4.3", + "resolved": "https://registry.npmjs.org/@vector-im/compound-design-tokens/-/compound-design-tokens-6.4.3.tgz", + "integrity": "sha512-Zyqag2QCAfpRipLHSX/LEXj6OGFgBpilMQvNdr3a0iXeGASfkM3HZGid077kMRp9lvyHiQfPIs1BdllENwDSNQ==", + "license": "SEE LICENSE IN README.md", + "peerDependencies": { + "@types/react": "*", + "react": "^17 || ^18 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react": { + "optional": true + } + } + }, + "node_modules/@vector-im/compound-web": { + "version": "8.3.5", + "resolved": "https://registry.npmjs.org/@vector-im/compound-web/-/compound-web-8.3.5.tgz", + "integrity": "sha512-MbqnZMSwu3y8Z7b529Hf8t790adbcjQiq6X+PrX8emvRDsQsSa/S5Na1udStseCuLp3nAfqrgnospIgselbpvA==", + "license": "SEE LICENSE IN README.md", + "dependencies": { + "@floating-ui/react": "^0.27.0", + "@radix-ui/react-context-menu": "^2.2.16", + "@radix-ui/react-dropdown-menu": "^2.1.1", + "@radix-ui/react-form": "^0.1.0", + "@radix-ui/react-progress": "^1.1.0", + "@radix-ui/react-separator": "^1.1.0", + "@radix-ui/react-slot": "^1.1.0", + "classnames": "^2.5.1", + "vaul": "^1.0.0" + }, + "peerDependencies": { + "@fontsource/inconsolata": "^5", + "@fontsource/inter": "^5", + "@types/react": "*", + "@vector-im/compound-design-tokens": ">=1.6.1 <7.0.0", + "react": "^18 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.4.tgz", + "integrity": "sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/core": "^7.29.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-rc.3", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.18.0" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@vitest/coverage-v8": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.18.tgz", + "integrity": "sha512-7i+N2i0+ME+2JFZhfuz7Tg/FqKtilHjGyGvoHYQ6iLV0zahbsJ9sljC9OcFcPDbhYKCet+sG8SsVqlyGvPflZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^1.0.2", + "@vitest/utils": "4.0.18", + "ast-v8-to-istanbul": "^0.3.10", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-reports": "^3.2.0", + "magicast": "^0.5.1", + "obug": "^2.1.1", + "std-env": "^3.10.0", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@vitest/browser": "4.0.18", + "vitest": "4.0.18" + }, + "peerDependenciesMeta": { + "@vitest/browser": { + "optional": true + } + } + }, + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": 
"sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.18.tgz", + "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.18.tgz", + "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.0.18", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + 
"version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.18.tgz", + "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.18.tgz", + "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.0.18", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@whatwg-node/disposablestack": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/@whatwg-node/disposablestack/-/disposablestack-0.0.6.tgz", + "integrity": "sha512-LOtTn+JgJvX8WfBVJtF08TGrdjuFzGJc4mkP8EdDI8ADbvO7kiexYep1o8dwnt0okb0jYclCDXF13xU7Ge4zSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@whatwg-node/promise-helpers": "^1.0.0", + "tslib": "^2.6.3" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@whatwg-node/fetch": { + "version": "0.10.13", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.10.13.tgz", + "integrity": "sha512-b4PhJ+zYj4357zwk4TTuF2nEe0vVtOrwdsrNo5hL+u1ojXNhh1FgJ6pg1jzDlwlT4oBdzfSwaBwMCtFCsIWg8Q==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "@whatwg-node/node-fetch": "^0.8.3", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@whatwg-node/node-fetch": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.8.4.tgz", + "integrity": "sha512-AlKLc57loGoyYlrzDbejB9EeR+pfdJdGzbYnkEuZaGekFboBwzfVYVMsy88PMriqPI1ORpiGYGgSSWpx7a2sDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^3.1.1", + "@whatwg-node/disposablestack": "^0.0.6", + "@whatwg-node/promise-helpers": "^1.3.2", + "tslib": "^2.6.3" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@whatwg-node/promise-helpers": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@whatwg-node/promise-helpers/-/promise-helpers-1.3.2.tgz", + "integrity": "sha512-Nst5JdK47VIl9UcGwtv2Rcgyn5lWtZ0/mhRQ4G8NN2isxpq2TO30iqHzmwoJycjWuyUfg3GFXqP/gFHXeV57IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.6.3" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@zxcvbn-ts/core": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@zxcvbn-ts/core/-/core-3.0.4.tgz", + "integrity": "sha512-aQeiT0F09FuJaAqNrxynlAwZ2mW/1MdXakKWNmGM1Qp/VaY6CnB/GfnMS2T8gB2231Esp1/maCWd8vTG4OuShw==", + "license": "MIT", + "dependencies": { + "fastest-levenshtein": "1.0.16" + } + }, + "node_modules/@zxcvbn-ts/language-common": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@zxcvbn-ts/language-common/-/language-common-3.0.4.tgz", + "integrity": "sha512-viSNNnRYtc7ULXzxrQIVUNwHAPSXRtoIwy/Tq4XQQdIknBzw4vz36lQLF6mvhMlTIlpjoN/Z1GFu/fwiAlUSsw==", + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + 
"bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ansi-escapes": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz", + "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "environment": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/ansis": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/ansis/-/ansis-4.2.0.tgz", + "integrity": "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/anymatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + 
"node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/ast-types": { + "version": "0.16.1", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz", + "integrity": "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.11.tgz", + "integrity": "sha512-Qya9fkoofMjCBNVdWINMjB5KZvkYfaO9/anwkWnjxibpWUxo5iHl2sOdP7/uAqaRuUYuoo8rDwnbaaKVFxoUvw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.31", + "estree-walker": "^3.0.3", + "js-tokens": "^10.0.0" + } + }, + "node_modules/ast-v8-to-istanbul/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": 
"sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/ast-v8-to-istanbul/node_modules/js-tokens": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-10.0.0.tgz", + "integrity": "sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/auto-bind": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/auto-bind/-/auto-bind-4.0.0.tgz", + "integrity": "sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.23", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz", + "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001760", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/babel-dead-code-elimination": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/babel-dead-code-elimination/-/babel-dead-code-elimination-1.0.11.tgz", + "integrity": 
"sha512-mwq3W3e/pKSI6TG8lXMiDWvEi1VXYlSBlJlB3l+I0bAb5u1RNUl88udos85eOPNK3m5EXK9uO7d2g08pesTySQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.23.7", + "@babel/parser": "^7.23.6", + "@babel/traverse": "^7.23.7", + "@babel/types": "^7.23.6" + } + }, + "node_modules/babel-plugin-syntax-trailing-function-commas": { + "version": "7.0.0-beta.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz", + "integrity": "sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/babel-preset-fbjs": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/babel-preset-fbjs/-/babel-preset-fbjs-3.4.0.tgz", + "integrity": "sha512-9ywCsCvo1ojrw0b+XYk7aFvTH6D9064t0RIL1rtMf3nsa02Xw41MS7sZw216Im35xj/UY0PDBQsa1brUDDF1Ow==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-proposal-class-properties": "^7.0.0", + "@babel/plugin-proposal-object-rest-spread": "^7.0.0", + "@babel/plugin-syntax-class-properties": "^7.0.0", + "@babel/plugin-syntax-flow": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.0.0", + "@babel/plugin-syntax-object-rest-spread": "^7.0.0", + "@babel/plugin-transform-arrow-functions": "^7.0.0", + "@babel/plugin-transform-block-scoped-functions": "^7.0.0", + "@babel/plugin-transform-block-scoping": "^7.0.0", + "@babel/plugin-transform-classes": "^7.0.0", + "@babel/plugin-transform-computed-properties": "^7.0.0", + "@babel/plugin-transform-destructuring": "^7.0.0", + "@babel/plugin-transform-flow-strip-types": "^7.0.0", + "@babel/plugin-transform-for-of": "^7.0.0", + "@babel/plugin-transform-function-name": "^7.0.0", + "@babel/plugin-transform-literals": "^7.0.0", + "@babel/plugin-transform-member-expression-literals": "^7.0.0", + "@babel/plugin-transform-modules-commonjs": "^7.0.0", + "@babel/plugin-transform-object-super": 
"^7.0.0", + "@babel/plugin-transform-parameters": "^7.0.0", + "@babel/plugin-transform-property-literals": "^7.0.0", + "@babel/plugin-transform-react-display-name": "^7.0.0", + "@babel/plugin-transform-react-jsx": "^7.0.0", + "@babel/plugin-transform-shorthand-properties": "^7.0.0", + "@babel/plugin-transform-spread": "^7.0.0", + "@babel/plugin-transform-template-literals": "^7.0.0", + "babel-plugin-syntax-trailing-function-commas": "^7.0.0-beta.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.8", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.8.tgz", + "integrity": "sha512-Y1fOuNDowLfgKOypdc9SPABfoWXuZHBOyCS4cD52IeZBhr4Md6CLLs6atcxVrzRmQ06E7hSlm5bHHApPKR/byA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, 
+ "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/browserslist-to-esbuild": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/browserslist-to-esbuild/-/browserslist-to-esbuild-2.1.1.tgz", + "integrity": "sha512-KN+mty6C3e9AN8Z5dI1xeN15ExcRNeISoC3g7V0Kax/MMF9MSoYA2G7lkTTcVUFntiEjkpI0HNgqJC1NjdyNUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "meow": "^13.0.0" + }, + "bin": { + "browserslist-to-esbuild": "cli/index.js" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "browserslist": "*" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + 
"dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/bundle-name": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", + "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "run-applescript": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001760", + "resolved": 
"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001760.tgz", + "integrity": "sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/capital-case": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/capital-case/-/capital-case-1.0.4.tgz", + "integrity": "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/chai": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/change-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-4.1.2.tgz", + "integrity": 
"sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "camel-case": "^4.1.2", + "capital-case": "^1.0.4", + "constant-case": "^3.0.4", + "dot-case": "^3.0.4", + "header-case": "^2.0.4", + "no-case": "^3.0.4", + "param-case": "^3.0.4", + "pascal-case": "^3.1.2", + "path-case": "^3.0.4", + "sentence-case": "^3.0.4", + "snake-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/change-case-all": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/change-case-all/-/change-case-all-1.0.15.tgz", + "integrity": "sha512-3+GIFhk3sNuvFAJKU46o26OdzudQlPNBCu1ZQi3cMeMHhty1bhDxu2WrEilVNYaGvqUtR1VSigFcJOiS13dRhQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "change-case": "^4.1.2", + "is-lower-case": "^2.0.2", + "is-upper-case": "^2.0.2", + "lower-case": "^2.0.2", + "lower-case-first": "^2.0.2", + "sponge-case": "^1.0.1", + "swap-case": "^2.0.2", + "title-case": "^3.0.3", + "upper-case": "^2.0.2", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/chardet": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.1.tgz", + "integrity": "sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + 
"braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", + "license": "MIT" + }, + "node_modules/cli-cursor": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-spinners": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-3.4.0.tgz", + "integrity": "sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz", + "integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "slice-ansi": "^7.1.0", + "string-width": "^8.0.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/string-width": { + "version": "8.1.0", + 
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz", + "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.3.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", + "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 12" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + 
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", + "integrity": "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + } + }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/constant-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-3.0.4.tgz", + "integrity": "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case": "^2.0.2" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": 
"sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/cookie-es": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cookie-es/-/cookie-es-2.0.0.tgz", + "integrity": "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg==", + "license": "MIT" + }, + "node_modules/cosmiconfig": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz", + "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cross-fetch": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.2.0.tgz", + "integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "node-fetch": "^2.7.0" + } + }, + "node_modules/cross-inspect": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cross-inspect/-/cross-inspect-1.0.1.tgz", + "integrity": 
"sha512-Pcw1JTvZLSJH83iiGWt6fRcT+BjZlCDRVwYLbUcHzv/CRpB7r0MlSrGbIyQvVSNyGnbt7G4AXuyCiDR3POvZ1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/dataloader": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/dataloader/-/dataloader-2.2.3.tgz", + "integrity": "sha512-y2krtASINtPFS1rSDjacrFgn1dcUuoREVabwlOGOe4SdxenREqwjwjElAdwvbGM7kgZz9a3KVicWR7vcz8rnzA==", + "dev": true, + "license": "MIT" + }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, + "node_modules/debounce": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/debounce/-/debounce-2.2.0.tgz", + "integrity": "sha512-Xks6RUDLZFdz8LIdR6q0MTH44k7FikOmnh5xkSjMig6ch45afc8sjTjRQf3P6ax8dMgcQrYO/AR2RGWURrruqw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">=6" + } + }, + "node_modules/default-browser": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.4.0.tgz", + "integrity": "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg==", + "dev": true, + "license": "MIT", + "dependencies": { + "bundle-name": "^4.1.0", + "default-browser-id": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.1.tgz", + "integrity": "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/dependency-graph": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-1.0.0.tgz", + "integrity": "sha512-cW3gggJ28HZ/LExwxP2B++aiKxhJXMSIt9K48FOXQkm+vuG5gyatXnLsONRJdzO/7VfjDIiaOOa/bs4l464Lwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", + "license": "MIT" + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/diff": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz", + "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": 
"sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dom-accessibility-api": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", + "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/dset": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/dset/-/dset-3.1.4.tgz", + "integrity": "sha512-2QF/g9/zTaPDc3BjNcVTGoBbXBgYfMTTceLaYcFJ/W9kggFUkhxD/hMEeuLKbugyef9SqAx8cpgwlIP/jinUTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.267", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", + "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "dev": true, + "license": "ISC" + }, + "node_modules/empathic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": 
"sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.1.tgz", + "integrity": "sha512-yY35KZckJJuVVPXpvjgxiCuVEJT67F6zDeVTv4rizyPrfGBUpZQsvmxnN+C371c2esD/hNMjj4tpBhuueLN7aA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + 
"optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.1", + "@esbuild/android-arm": "0.27.1", + "@esbuild/android-arm64": "0.27.1", + "@esbuild/android-x64": "0.27.1", + "@esbuild/darwin-arm64": "0.27.1", + "@esbuild/darwin-x64": "0.27.1", + "@esbuild/freebsd-arm64": "0.27.1", + "@esbuild/freebsd-x64": "0.27.1", + "@esbuild/linux-arm": "0.27.1", + "@esbuild/linux-arm64": "0.27.1", + "@esbuild/linux-ia32": "0.27.1", + "@esbuild/linux-loong64": "0.27.1", + "@esbuild/linux-mips64el": "0.27.1", + "@esbuild/linux-ppc64": "0.27.1", + "@esbuild/linux-riscv64": "0.27.1", + "@esbuild/linux-s390x": "0.27.1", + "@esbuild/linux-x64": "0.27.1", + "@esbuild/netbsd-arm64": "0.27.1", + "@esbuild/netbsd-x64": "0.27.1", + "@esbuild/openbsd-arm64": "0.27.1", + "@esbuild/openbsd-x64": "0.27.1", + "@esbuild/openharmony-arm64": "0.27.1", + "@esbuild/sunos-x64": "0.27.1", + "@esbuild/win32-arm64": "0.27.1", + "@esbuild/win32-ia32": "0.27.1", + "@esbuild/win32-x64": "0.27.1" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "dev": true, + "license": "MIT" + }, + "node_modules/execa": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-9.6.1.tgz", + "integrity": "sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^4.0.0", + "cross-spawn": "^7.0.6", + "figures": "^6.1.0", + "get-stream": "^9.0.0", + "human-signals": "^8.0.1", + "is-plain-obj": "^4.1.0", + "is-stream": "^4.0.1", + "npm-run-path": "^6.0.0", + "pretty-ms": "^9.2.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^4.0.0", + "yoctocolors": "^2.1.1" + }, + "engines": { + "node": "^18.19.0 || >=20.5.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", + "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fbjs": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.5.tgz", + "integrity": "sha512-ztsSx77JBtkuMrEypfhgc3cI0+0h+svqeie7xHbh1k/IKdcydnvadp/mUaGgjAOXQmQSxsqgaRhS3q9fy+1kxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-fetch": "^3.1.5", + "fbjs-css-vars": "^1.0.0", + "loose-envify": "^1.0.0", + "object-assign": "^4.1.0", + "promise": "^7.1.1", + "setimmediate": "^1.0.5", + "ua-parser-js": "^1.0.35" + } + }, + "node_modules/fbjs-css-vars": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", + "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/fd-package-json": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fd-package-json/-/fd-package-json-2.0.0.tgz", + "integrity": "sha512-jKmm9YtsNXN789RS/0mSzOC1NUq9mkVd65vbSSVsKdjGvYXBuE4oWe2QOEoFeRmJg+lPuZxpmrfFclNhoRMneQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "walk-up-path": "^4.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/figures": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-6.1.0.tgz", + "integrity": "sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-unicode-supported": "^2.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": 
"sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/formatly": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/formatly/-/formatly-0.3.0.tgz", + "integrity": "sha512-9XNj/o4wrRFyhSMJOvsuyMwy8aUfBaZ1VrqHVfohyXf0Sw0e+yfKG+xZaY3arGCOMdwFsqObtzVOc1gU9KiT9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "fd-package-json": "^2.0.0" + }, + "bin": { + "formatly": "bin/index.mjs" + }, + "engines": { + "node": ">=18.3.0" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": 
"sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-east-asian-width": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", + "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-stream": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", + "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "13.0.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.4.tgz", + "integrity": "sha512-KACie1EOs9BIOMtenFaxwmYODWA3/fTfGSUnLhMJpXRntu1g+uL/Xvub5f8SCTppvo9q62Qy4LeOoUiaL54G5A==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "minimatch": "^10.2.1", + "minipass": "^7.1.2", + "path-scurry": "^2.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob/node_modules/balanced-match": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.2.tgz", + "integrity": "sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg==", + "dev": true, + "license": "MIT", + "dependencies": { + "jackspeak": "^4.2.3" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.2.tgz", + "integrity": "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.1.tgz", + "integrity": "sha512-MClCe8IL5nRRmawL6ib/eT4oLyeKMGCghibcDWK+J0hh0Q8kqSdia6BvbRMVk6mPa6WqUa5uR2oxt6C5jd533A==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/goober": { + "version": "2.1.18", + "resolved": "https://registry.npmjs.org/goober/-/goober-2.1.18.tgz", + "integrity": "sha512-2vFqsaDVIT9Gz7N6kAL++pLpp41l3PfDuusHcjnGLfR6+huZkl6ziX+zgVC3ZxpqWhzH6pyDdGrCeDhMIvwaxw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "csstype": "^3.0.10" + } + }, + "node_modules/graphql": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.12.0.tgz", + "integrity": "sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" + } + }, + "node_modules/graphql-config": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-5.1.5.tgz", + "integrity": "sha512-mG2LL1HccpU8qg5ajLROgdsBzx/o2M6kgI3uAmoaXiSH9PCUbtIyLomLqUtCFaAeG2YCFsl0M5cfQ9rKmDoMVA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.1.0", + "@graphql-tools/merge": "^9.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", + "cosmiconfig": "^8.1.0", + "jiti": "^2.0.0", + "minimatch": "^9.0.5", + "string-env-interpolation": "^1.0.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "cosmiconfig-toml-loader": "^1.0.0", + "graphql": "^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "peerDependenciesMeta": { + "cosmiconfig-toml-loader": { + "optional": true + } + } + }, + "node_modules/graphql-config/node_modules/@graphql-hive/signal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@graphql-hive/signal/-/signal-1.0.0.tgz", + "integrity": 
"sha512-RiwLMc89lTjvyLEivZ/qxAC5nBHoS2CtsWFSOsN35sxG9zoo5Z+JsFHM8MlvmO9yt+MJNIyC5MLE1rsbOphlag==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/batch-execute": { + "version": "9.0.19", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-9.0.19.tgz", + "integrity": "sha512-VGamgY4PLzSx48IHPoblRw0oTaBa7S26RpZXt0Y4NN90ytoE0LutlpB2484RbkfcTjv9wa64QD474+YP1kEgGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/utils": "^10.9.1", + "@whatwg-node/promise-helpers": "^1.3.0", + "dataloader": "^2.2.3", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/delegate": { + "version": "10.2.23", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-10.2.23.tgz", + "integrity": "sha512-xrPtl7f1LxS+B6o+W7ueuQh67CwRkfl+UKJncaslnqYdkxKmNBB4wnzVcW8ZsRdwbsla/v43PtwAvSlzxCzq2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/batch-execute": "^9.0.19", + "@graphql-tools/executor": "^1.4.9", + "@graphql-tools/schema": "^10.0.25", + "@graphql-tools/utils": "^10.9.1", + "@repeaterjs/repeater": "^3.0.6", + "@whatwg-node/promise-helpers": "^1.3.0", + "dataloader": "^2.2.3", + "dset": "^3.1.2", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/executor-common": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-common/-/executor-common-0.0.6.tgz", + "integrity": "sha512-JAH/R1zf77CSkpYATIJw+eOJwsbWocdDjY+avY7G+P5HCXxwQjAjWVkJI1QJBQYjPQDVxwf1fmTZlIN3VOadow==", + "dev": true, + "license": "MIT", + "dependencies": { + "@envelop/core": 
"^5.3.0", + "@graphql-tools/utils": "^10.9.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/executor-graphql-ws": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-2.0.7.tgz", + "integrity": "sha512-J27za7sKF6RjhmvSOwOQFeNhNHyP4f4niqPnerJmq73OtLx9Y2PGOhkXOEB0PjhvPJceuttkD2O1yMgEkTGs3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/executor-common": "^0.0.6", + "@graphql-tools/utils": "^10.9.1", + "@whatwg-node/disposablestack": "^0.0.6", + "graphql-ws": "^6.0.6", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.8.1", + "ws": "^8.18.3" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/executor-http": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-1.3.3.tgz", + "integrity": "sha512-LIy+l08/Ivl8f8sMiHW2ebyck59JzyzO/yF9SFS4NH6MJZUezA1xThUXCDIKhHiD56h/gPojbkpcFvM2CbNE7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-hive/signal": "^1.0.0", + "@graphql-tools/executor-common": "^0.0.4", + "@graphql-tools/utils": "^10.8.1", + "@repeaterjs/repeater": "^3.0.4", + "@whatwg-node/disposablestack": "^0.0.6", + "@whatwg-node/fetch": "^0.10.4", + "@whatwg-node/promise-helpers": "^1.3.0", + "meros": "^1.2.1", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/executor-http/node_modules/@graphql-tools/executor-common": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-common/-/executor-common-0.0.4.tgz", + 
"integrity": "sha512-SEH/OWR+sHbknqZyROCFHcRrbZeUAyjCsgpVWCRjqjqRbiJiXq6TxNIIOmpXgkrXWW/2Ev4Wms6YSGJXjdCs6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@envelop/core": "^5.2.3", + "@graphql-tools/utils": "^10.8.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/url-loader": { + "version": "8.0.33", + "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-8.0.33.tgz", + "integrity": "sha512-Fu626qcNHcqAj8uYd7QRarcJn5XZ863kmxsg1sm0fyjyfBJnsvC7ddFt6Hayz5kxVKfsnjxiDfPMXanvsQVBKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/executor-graphql-ws": "^2.0.1", + "@graphql-tools/executor-http": "^1.1.9", + "@graphql-tools/executor-legacy-ws": "^1.1.19", + "@graphql-tools/utils": "^10.9.1", + "@graphql-tools/wrap": "^10.0.16", + "@types/ws": "^8.0.0", + "@whatwg-node/fetch": "^0.10.0", + "@whatwg-node/promise-helpers": "^1.0.0", + "isomorphic-ws": "^5.0.0", + "sync-fetch": "0.6.0-2", + "tslib": "^2.4.0", + "ws": "^8.17.1" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/@graphql-tools/wrap": { + "version": "10.1.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-10.1.4.tgz", + "integrity": "sha512-7pyNKqXProRjlSdqOtrbnFRMQAVamCmEREilOXtZujxY6kYit3tvWWSjUrcIOheltTffoRh7EQSjpy2JDCzasg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@graphql-tools/delegate": "^10.2.23", + "@graphql-tools/schema": "^10.0.25", + "@graphql-tools/utils": "^10.9.1", + "@whatwg-node/promise-helpers": "^1.3.0", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/graphql-config/node_modules/brace-expansion": 
{ + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/graphql-config/node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/graphql-config/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/graphql-tag": { + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.6.tgz", + "integrity": "sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + 
"node_modules/graphql-ws": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-6.0.6.tgz", + "integrity": "sha512-zgfER9s+ftkGKUZgc0xbx8T7/HMO4AV5/YuYiFc+AtgcO5T0v8AxYYNQ+ltzuzDZgNkYJaFspm5MMYLjQzrkmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "peerDependencies": { + "@fastify/websocket": "^10 || ^11", + "crossws": "~0.3", + "graphql": "^15.10.1 || ^16", + "uWebSockets.js": "^20", + "ws": "^8" + }, + "peerDependenciesMeta": { + "@fastify/websocket": { + "optional": true + }, + "crossws": { + "optional": true + }, + "uWebSockets.js": { + "optional": true + }, + "ws": { + "optional": true + } + } + }, + "node_modules/happy-dom": { + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/happy-dom/-/happy-dom-20.5.0.tgz", + "integrity": "sha512-VQe+Q5CYiGOgcCERXhcfNsbnrN92FDEKciMH/x6LppU9dd0j4aTjCTlqONFOIMcAm/5JxS3+utowbXV1OoFr+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": ">=20.0.0", + "@types/whatwg-mimetype": "^3.0.2", + "@types/ws": "^8.18.1", + "entities": "^4.5.0", + "whatwg-mimetype": "^3.0.0", + "ws": "^8.18.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/header-case": { + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/header-case/-/header-case-2.0.4.tgz", + "integrity": "sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "capital-case": "^1.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/headers-polyfill": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.3.tgz", + "integrity": "sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/html-parse-stringify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz", + "integrity": "sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg==", + "license": "MIT", + "dependencies": { + "void-elements": "3.1.0" + } + }, + "node_modules/human-signals": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/i18next": { + "version": "25.8.10", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-25.8.10.tgz", + "integrity": "sha512-CtPJLMAz1G8sxo+mIzfBjGgLxWs7d6WqIjlmmv9BTsOat4pJIfwZ8cm07n3kFS6bP9c6YwsYutYrwsEeJVBo2g==", + "funding": [ + { + "type": "individual", + "url": "https://locize.com" + }, + { + "type": "individual", + "url": "https://locize.com/i18next.html" + }, + { + "type": "individual", 
+ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" + } + ], + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.28.4" + }, + "peerDependencies": { + "typescript": "^5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/i18next-cli": { + "version": "1.42.9", + "resolved": "https://registry.npmjs.org/i18next-cli/-/i18next-cli-1.42.9.tgz", + "integrity": "sha512-Gh11eWLMyuqoxYVBRp+e+3DEmT2hDGJswjg04iqXMnMzT89buryLKx6zL20x8F7AwkM6n4KR5iN6H61drV6EAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@croct/json5-parser": "0.2.2", + "@swc/core": "1.15.11", + "chokidar": "5.0.0", + "commander": "14.0.3", + "execa": "9.6.1", + "glob": "13.0.1", + "i18next-resources-for-ts": "2.0.0", + "inquirer": "13.2.2", + "jiti": "2.6.1", + "jsonc-parser": "3.3.1", + "minimatch": "10.1.2", + "ora": "9.3.0", + "react": "^19.2.4", + "react-i18next": "^16.5.4", + "yaml": "2.8.2" + }, + "bin": { + "i18next-cli": "dist/esm/cli.js" + }, + "engines": { + "node": ">=22" + } + }, + "node_modules/i18next-cli/node_modules/chokidar": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^5.0.0" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/i18next-cli/node_modules/glob": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.1.tgz", + "integrity": "sha512-B7U/vJpE3DkJ5WXTgTpTRN63uV42DseiXXKMwG14LQBXmsdeIoHAPbU/MEo6II0k5ED74uc2ZGTC6MwHFQhF6w==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "minimatch": "^10.1.2", + "minipass": "^7.1.2", + "path-scurry": "^2.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + 
"url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/i18next-cli/node_modules/readdirp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/i18next-resources-for-ts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/i18next-resources-for-ts/-/i18next-resources-for-ts-2.0.0.tgz", + "integrity": "sha512-RvATolbJlxrwpZh2+R7ZcNtg0ewmXFFx6rdu9i2bUEBvn6ThgA82rxDe3rJQa3hFS0SopX0qPaABqVDN3TUVpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.28.4", + "@swc/core": "^1.15.3", + "chokidar": "^5.0.0", + "yaml": "^2.8.2" + }, + "bin": { + "i18next-resources-for-ts": "bin/i18next-resources-for-ts.js" + } + }, + "node_modules/i18next-resources-for-ts/node_modules/chokidar": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^5.0.0" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/i18next-resources-for-ts/node_modules/readdirp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/immutable": { + "version": "3.7.6", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", + "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/import-from": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/import-from/-/import-from-4.0.0.tgz", + "integrity": "sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/inquirer": { + "version": "13.2.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-13.2.2.tgz", + "integrity": "sha512-+hlN8I88JE9T3zjWHGnMhryniRDbSgFNJHJTyD2iKO5YNpMRyfghQ6wVoe+gV4ygMM4r4GzlsBxNa1g/UUZixA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^2.0.3", + "@inquirer/core": "^11.1.1", + "@inquirer/prompts": "^8.2.0", + "@inquirer/type": "^4.0.3", + "mute-stream": "^3.0.0", + "run-async": "^4.0.6", + "rxjs": "^7.8.2" + }, + 
"engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/ansi": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-2.0.3.tgz", + "integrity": "sha512-g44zhR3NIKVs0zUesa4iMzExmZpLUdTLRMCStqX3GE5NT6VkPcxQGJ+uC8tDgBUC/vB1rUhUd55cOf++4NZcmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + } + }, + "node_modules/inquirer/node_modules/@inquirer/checkbox": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-5.0.4.tgz", + "integrity": "sha512-DrAMU3YBGMUAp6ArwTIp/25CNDtDbxk7UjIrrtM25JVVrlVYlVzHh5HR1BDFu9JMyUoZ4ZanzeaHqNDttf3gVg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^2.0.3", + "@inquirer/core": "^11.1.1", + "@inquirer/figures": "^2.0.3", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/confirm": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-6.0.4.tgz", + "integrity": "sha512-WdaPe7foUnoGYvXzH4jp4wH/3l+dBhZ3uwhKjXjwdrq5tEIFaANxj6zrGHxLdsIA0yKM0kFPVcEalOZXBB5ISA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/core": { + "version": "11.1.1", + "resolved": 
"https://registry.npmjs.org/@inquirer/core/-/core-11.1.1.tgz", + "integrity": "sha512-hV9o15UxX46OyQAtaoMqAOxGR8RVl1aZtDx1jHbCtSJy1tBdTfKxLPKf7utsE4cRy4tcmCQ4+vdV+ca+oNxqNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^2.0.3", + "@inquirer/figures": "^2.0.3", + "@inquirer/type": "^4.0.3", + "cli-width": "^4.1.0", + "mute-stream": "^3.0.0", + "signal-exit": "^4.1.0", + "wrap-ansi": "^9.0.2" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/editor": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-5.0.4.tgz", + "integrity": "sha512-QI3Jfqcv6UO2/VJaEFONH8Im1ll++Xn/AJTBn9Xf+qx2M+H8KZAdQ5sAe2vtYlo+mLW+d7JaMJB4qWtK4BG3pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/external-editor": "^2.0.3", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/expand": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-5.0.4.tgz", + "integrity": "sha512-0I/16YwPPP0Co7a5MsomlZLpch48NzYfToyqYAOWtBmaXSB80RiNQ1J+0xx2eG+Wfxt0nHtpEWSRr6CzNVnOGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/external-editor": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-2.0.3.tgz", + "integrity": "sha512-LgyI7Agbda74/cL5MvA88iDpvdXI2KuMBCGRkbCl2Dg1vzHeOgs+s0SDcXV7b+WZJrv2+ERpWSM65Fpi9VfY3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^2.1.1", + "iconv-lite": "^0.7.2" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/figures": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-2.0.3.tgz", + "integrity": "sha512-y09iGt3JKoOCBQ3w4YrSJdokcD8ciSlMIWsD+auPu+OZpfxLuyz+gICAQ6GCBOmJJt4KEQGHuZSVff2jiNOy7g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + } + }, + "node_modules/inquirer/node_modules/@inquirer/input": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-5.0.4.tgz", + "integrity": "sha512-4B3s3jvTREDFvXWit92Yc6jF1RJMDy2VpSqKtm4We2oVU65YOh2szY5/G14h4fHlyQdpUmazU5MPCFZPRJ0AOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/number": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-4.0.4.tgz", + "integrity": "sha512-CmMp9LF5HwE+G/xWsC333TlCzYYbXMkcADkKzcawh49fg2a1ryLc7JL1NJYYt1lJ+8f4slikNjJM9TEL/AljYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": 
{ + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/password": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-5.0.4.tgz", + "integrity": "sha512-ZCEPyVYvHK4W4p2Gy6sTp9nqsdHQCfiPXIP9LbJVW4yCinnxL/dDDmPaEZVysGrj8vxVReRnpfS2fOeODe9zjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^2.0.3", + "@inquirer/core": "^11.1.1", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/prompts": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-8.2.0.tgz", + "integrity": "sha512-rqTzOprAj55a27jctS3vhvDDJzYXsr33WXTjODgVOru21NvBo9yIgLIAf7SBdSV0WERVly3dR6TWyp7ZHkvKFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/checkbox": "^5.0.4", + "@inquirer/confirm": "^6.0.4", + "@inquirer/editor": "^5.0.4", + "@inquirer/expand": "^5.0.4", + "@inquirer/input": "^5.0.4", + "@inquirer/number": "^4.0.4", + "@inquirer/password": "^5.0.4", + "@inquirer/rawlist": "^5.2.0", + "@inquirer/search": "^4.1.0", + "@inquirer/select": "^5.0.4" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/rawlist": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-5.2.0.tgz", + "integrity": "sha512-CciqGoOUMrFo6HxvOtU5uL8fkjCmzyeB6fG7O1vdVAZVSopUBYECOwevDBlqNLyyYmzpm2Gsn/7nLrpruy9RFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + 
"@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/search": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-4.1.0.tgz", + "integrity": "sha512-EAzemfiP4IFvIuWnrHpgZs9lAhWDA0GM3l9F4t4mTQ22IFtzfrk8xbkMLcAN7gmVML9O/i+Hzu8yOUyAaL6BKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/core": "^11.1.1", + "@inquirer/figures": "^2.0.3", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/select": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-5.0.4.tgz", + "integrity": "sha512-s8KoGpPYMEQ6WXc0dT9blX2NtIulMdLOO3LA1UKOiv7KFWzlJ6eLkEYTDBIi+JkyKXyn8t/CD6TinxGjyLt57g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@inquirer/ansi": "^2.0.3", + "@inquirer/core": "^11.1.1", + "@inquirer/figures": "^2.0.3", + "@inquirer/type": "^4.0.3" + }, + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/@inquirer/type": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-4.0.3.tgz", + "integrity": "sha512-cKZN7qcXOpj1h+1eTTcGDVLaBIHNMT1Rz9JqJP5MnEJ0JhgVWllx7H/tahUp5YEK1qaByH2Itb8wLG/iScD5kw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=23.5.0 || ^22.13.0 || ^21.7.0 || ^20.12.0" + }, + "peerDependencies": { + "@types/node": ">=18" + }, + 
"peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/inquirer/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/inquirer/node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "dev": true, + "license": "MIT" + }, + "node_modules/inquirer/node_modules/mute-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-3.0.0.tgz", + "integrity": "sha512-dkEJPVvun4FryqBmZ5KhDo0K9iDXAwn08tMLDinNdRBNPcYEDiWYysLcc6k3mjTMlbP9KyylvRpd4wFtwrT9rw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/inquirer/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inquirer/node_modules/wrap-ansi": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", + "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/invariant": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/is-absolute": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz", + "integrity": "sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-relative": "^1.0.0", + "is-windows": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "dev": true, + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz", + "integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.3.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": 
"sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-interactive": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", + "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-lower-case/-/is-lower-case-2.0.2.tgz", + "integrity": "sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/is-node-process": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz", + "integrity": "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "dev": true, + "license": "MIT", 
+ "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-relative": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", + "integrity": "sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-unc-path": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unc-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", + "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "unc-path-regex": "^0.1.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-unicode-supported": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", + "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-upper-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-upper-case/-/is-upper-case-2.0.2.tgz", + "integrity": "sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"tslib": "^2.0.3" + } + }, + "node_modules/is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-wsl": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.0.tgz", + "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-inside-container": "^1.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isbot": { + "version": "5.1.32", + "resolved": "https://registry.npmjs.org/isbot/-/isbot-5.1.32.tgz", + "integrity": "sha512-VNfjM73zz2IBZmdShMfAUg10prm6t7HFUQmNAEOAVS4YH92ZrZcvkMcGX6cIgBJAzWDzPent/EeAtYEHNPNPBQ==", + "license": "Unlicense", + "engines": { + "node": ">=18" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/isomorphic-ws": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", + "integrity": "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "ws": "*" + } + }, + "node_modules/isows": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/isows/-/isows-1.0.7.tgz", + "integrity": "sha512-I1fSfDCZL5P0v33sVqeTDSpcstAg/N+wF5HS033mogOVIp4B+oHC7oOCsA3axAbBSGTJ8QubbNmnIRN/h8U7hg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/wevm" + } + ], + "license": "MIT", + "peerDependencies": { + "ws": "*" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.2.3.tgz", + "integrity": "sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^9.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": 
"sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-to-pretty-yaml": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/json-to-pretty-yaml/-/json-to-pretty-yaml-1.2.2.tgz", + "integrity": "sha512-rvm6hunfCcqegwYaG5T4yKJWxc9FXFgBVrcTZ4XfSVRwa5HA/Xs+vB/Eo9treYYHCeNM0nrSUr82V/M31Urc7A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "remedial": "^1.0.7", + "remove-trailing-spaces": "^1.0.6" + }, + "engines": { + "node": ">= 0.2.0" + } + }, + "node_modules/json5": { + 
"version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz", + "integrity": "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/knip": { + "version": "5.81.0", + "resolved": "https://registry.npmjs.org/knip/-/knip-5.81.0.tgz", + "integrity": "sha512-EM9YdNg6zU2DWMJuc9zD8kPUpj0wvPspa63Qe9DPGygzL956uYThfoUQk5aNpPmMr9hs/k+Xm7FLuWFKERFkrQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/webpro" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/knip" + } + ], + "license": "ISC", + "dependencies": { + "@nodelib/fs.walk": "^1.2.3", + "fast-glob": "^3.3.3", + "formatly": "^0.3.0", + "jiti": "^2.6.0", + "js-yaml": "^4.1.1", + "minimist": "^1.2.8", + "oxc-resolver": "^11.15.0", + "picocolors": "^1.1.1", + "picomatch": "^4.0.1", + "smol-toml": "^1.5.2", + "strip-json-comments": "5.0.3", + "zod": "^4.1.11" + }, + "bin": { + "knip": "bin/knip.js", + "knip-bun": "bin/knip-bun.js" + }, + "engines": { + "node": ">=18.18.0" + }, + "peerDependencies": { + "@types/node": ">=18", + "typescript": ">=5.0.4 <7" + } + }, + "node_modules/knip/node_modules/zod": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.2.1.tgz", + "integrity": "sha512-0wZ1IRqGGhMP76gLqz8EyfBXKk0J2qo2+H3fi4mcUP/KtTocoX08nmIAHl1Z2kJIZbZee8KOpBCSNPRgauucjw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/listr2": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz", + "integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "cli-truncate": "^5.0.0", + "colorette": "^2.0.20", + "eventemitter3": "^5.0.1", + "log-update": "^6.1.0", + "rfdc": "^1.4.1", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/listr2/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/listr2/node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "dev": true, + "license": "MIT" + }, + "node_modules/listr2/node_modules/string-width": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/listr2/node_modules/wrap-ansi": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", + "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true, + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols/node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + 
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "dev": true, + "license": "MIT" + }, + "node_modules/log-update/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", + "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/loupe": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lower-case-first": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case-first/-/lower-case-first-2.0.2.tgz", + "integrity": "sha512-EVm/rR94FJTZi3zefZ82fLWab+GX14LJN4HrWBcuo6Evmsl9hEfnqxgcHCKb9q+mNf6EVdsjx/qucYFIIB84pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/magicast": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.5.1.tgz", + "integrity": "sha512-xrHS24IxaLrvuo613F719wvOIv9xPHFWQHuvGUBmPnCA/3MQxKI3b+r7n1jAoDHmsbC5bRhTZYR77invLAxVnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "@babel/types": "^7.28.5", + 
"source-map-js": "^1.2.1" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": "sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/meros": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/meros/-/meros-1.3.2.tgz", + "integrity": 
"sha512-Q3mobPbvEx7XbwhnC1J1r60+5H6EZyNccdzSz0eGexJRwouUtTZxPVRGdqKtxlpD84ScK4+tIGldkqDtCKdI0A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=13" + }, + "peerDependencies": { + "@types/node": ">=13" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.2.tgz", + "integrity": 
"sha512-fu656aJ0n2kcXwsnwnv9g24tkU5uSmOlTjd6WyyaKm2Z+h1qmY6bAjrcaIxF/BslFqbZ8UBtbJi7KgQOZD2PTw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/brace-expansion": "^5.0.1" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/msw": { + "version": "2.12.8", + "resolved": "https://registry.npmjs.org/msw/-/msw-2.12.8.tgz", + "integrity": "sha512-KOriJUhjefCO+liF7Ie1KlSXcBAQEzuLhPZ4EKuEUSEmAR4YhuuzT9YuGxTipjqDrg6eWQ6oMoGVhvEnqukFGg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@inquirer/confirm": "^5.0.0", + "@mswjs/interceptors": "^0.41.0", + "@open-draft/deferred-promise": "^2.2.0", + "@types/statuses": "^2.0.6", + "cookie": "^1.0.2", + "graphql": "^16.12.0", + "headers-polyfill": "^4.0.2", + "is-node-process": "^1.2.0", + "outvariant": "^1.4.3", + "path-to-regexp": "^6.3.0", + "picocolors": "^1.1.1", + "rettime": "^0.10.1", + "statuses": "^2.0.2", + "strict-event-emitter": "^0.5.1", + "tough-cookie": 
"^6.0.0", + "type-fest": "^5.2.0", + "until-async": "^3.0.2", + "yargs": "^17.7.2" + }, + "bin": { + "msw": "cli/index.js" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mswjs" + }, + "peerDependencies": { + "typescript": ">= 4.8.x" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/msw-storybook-addon": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/msw-storybook-addon/-/msw-storybook-addon-2.0.6.tgz", + "integrity": "sha512-ExCwDbcJoM2V3iQU+fZNp+axVfNc7DWMRh4lyTXebDO8IbpUNYKGFUrA8UqaeWiRGKVuS7+fU+KXEa9b0OP6uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-node-process": "^1.0.1" + }, + "peerDependencies": { + "msw": "^2.0.0" + } + }, + "node_modules/mute-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz", + "integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/no-case": { + 
"version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + 
"node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", + "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/nullthrows": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", + "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==", + "dev": true, + "license": "MIT" + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": 
"sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-function": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/open/-/open-10.2.0.tgz", + "integrity": "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==", + "dev": true, + "license": "MIT", + "dependencies": { + "default-browser": "^5.2.1", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "wsl-utils": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/ora/-/ora-9.3.0.tgz", + "integrity": 
"sha512-lBX72MWFduWEf7v7uWf5DHp9Jn5BI8bNPGuFgtXMmr2uDz2Gz2749y3am3agSDdkhHPHYmmxEGSKH85ZLGzgXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^5.6.2", + "cli-cursor": "^5.0.0", + "cli-spinners": "^3.2.0", + "is-interactive": "^2.0.0", + "is-unicode-supported": "^2.1.0", + "log-symbols": "^7.0.1", + "stdin-discarder": "^0.3.1", + "string-width": "^8.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/chalk": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", + "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/ora/node_modules/log-symbols": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-7.0.1.tgz", + "integrity": "sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-unicode-supported": "^2.0.0", + "yoctocolors": "^2.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora/node_modules/string-width": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.1.tgz", + "integrity": "sha512-KpqHIdDL9KwYk22wEOg/VIqYbrnLeSApsKT/bSj6Ez7pn3CftUiLAv2Lccpq1ALcpLV9UX1Ppn92npZWu2w/aw==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.3.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/outvariant": { + "version": "1.4.3", + "resolved": 
"https://registry.npmjs.org/outvariant/-/outvariant-1.4.3.tgz", + "integrity": "sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==", + "dev": true, + "license": "MIT" + }, + "node_modules/oxc-resolver": { + "version": "11.15.0", + "resolved": "https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.15.0.tgz", + "integrity": "sha512-Hk2J8QMYwmIO9XTCUiOH00+Xk2/+aBxRUnhrSlANDyCnLYc32R1WSIq1sU2yEdlqd53FfMpPEpnBYIKQMzliJw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + }, + "optionalDependencies": { + "@oxc-resolver/binding-android-arm-eabi": "11.15.0", + "@oxc-resolver/binding-android-arm64": "11.15.0", + "@oxc-resolver/binding-darwin-arm64": "11.15.0", + "@oxc-resolver/binding-darwin-x64": "11.15.0", + "@oxc-resolver/binding-freebsd-x64": "11.15.0", + "@oxc-resolver/binding-linux-arm-gnueabihf": "11.15.0", + "@oxc-resolver/binding-linux-arm-musleabihf": "11.15.0", + "@oxc-resolver/binding-linux-arm64-gnu": "11.15.0", + "@oxc-resolver/binding-linux-arm64-musl": "11.15.0", + "@oxc-resolver/binding-linux-ppc64-gnu": "11.15.0", + "@oxc-resolver/binding-linux-riscv64-gnu": "11.15.0", + "@oxc-resolver/binding-linux-riscv64-musl": "11.15.0", + "@oxc-resolver/binding-linux-s390x-gnu": "11.15.0", + "@oxc-resolver/binding-linux-x64-gnu": "11.15.0", + "@oxc-resolver/binding-linux-x64-musl": "11.15.0", + "@oxc-resolver/binding-openharmony-arm64": "11.15.0", + "@oxc-resolver/binding-wasm32-wasi": "11.15.0", + "@oxc-resolver/binding-win32-arm64-msvc": "11.15.0", + "@oxc-resolver/binding-win32-ia32-msvc": "11.15.0", + "@oxc-resolver/binding-win32-x64-msvc": "11.15.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": 
"^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/param-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", + "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", + "dev": true, + "license": "MIT", + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-filepath": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/parse-filepath/-/parse-filepath-1.0.2.tgz", + "integrity": "sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-absolute": "^1.0.0", + "map-cache": "^0.2.0", + "path-root": "^0.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse-ms": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", + "integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "no-case": 
"^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/path-case/-/path-case-3.0.4.tgz", + "integrity": "sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-root": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/path-root/-/path-root-0.1.1.tgz", + "integrity": "sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-root-regex": "^0.1.0" + }, + "engines": { 
+ "node": ">=0.10.0" + } + }, + "node_modules/path-root-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/path-root-regex/-/path-root-regex-0.1.2.tgz", + "integrity": "sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-scurry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz", + "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", + "dev": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/path-to-regexp": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", + "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": 
"sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, 
+ "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "16.1.1", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-16.1.1.tgz", + "integrity": "sha512-2xVS1NCZAfjtVdvXiyegxzJ447GyqCeEI5V7ApgQVOWnros1p5lGNovJNapwPpMombyFBfqDwt7AD3n2l0KOfQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": 
"https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-nested/node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-nesting": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-14.0.0.tgz", + "integrity": "sha512-YGFOfVrjxYfeGTS5XctP1WCI5hu8Lr9SmntjfRC+iX5hCihEO+QZl9Ra+pkjqkgoVdDKvb2JccpElcowhZtzpw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + 
"license": "MIT-0", + "dependencies": { + "@csstools/selector-resolve-nested": "^4.0.0", + "@csstools/selector-specificity": "^6.0.0", + "postcss-selector-parser": "^7.1.1" + }, + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-selector-parser": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", + "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/prettier": { + "version": "3.7.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", + "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + 
"node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/pretty-ms": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.3.0.tgz", + "integrity": "sha512-gjVS5hOP+M3wMm5nmNOucbIrqudzs9v/57bWRHQWLYklXqoXKrVfYW2W9+glfGsqtPgpiz5WwyEEB+ksXIx3gQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-ms": "^4.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "asap": "~2.0.3" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", + "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", + "license": "MIT", + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-docgen": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-8.0.2.tgz", + "integrity": "sha512-+NRMYs2DyTP4/tqWz371Oo50JqmWltR1h2gcdgUMAWZJIAvrd0/SqlCfx7tpzpl/s36rzw6qH2MjoNrxtRNYhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/traverse": "^7.28.0", + "@babel/types": "^7.28.2", + "@types/babel__core": "^7.20.5", + "@types/babel__traverse": "^7.20.7", + "@types/doctrine": "^0.0.9", + "@types/resolve": "^1.20.2", + "doctrine": "^3.0.0", + "resolve": "^1.22.1", + "strip-indent": "^4.0.0" + }, + "engines": { + "node": "^20.9.0 || >=22" + } + }, + "node_modules/react-docgen-typescript": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/react-docgen-typescript/-/react-docgen-typescript-2.4.0.tgz", + "integrity": "sha512-ZtAp5XTO5HRzQctjPU0ybY0RRCQO19X/8fxn3w7y2VVTUbGHDKULPTL4ky3vB05euSgG5NpALhEhDPvQ56wvXg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">= 4.3.x" + } + }, + "node_modules/react-dom": { + "version": "19.2.4", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz", + "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.4" + } + }, + "node_modules/react-i18next": { + "version": "16.5.4", + "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-16.5.4.tgz", + "integrity": "sha512-6yj+dcfMncEC21QPhOTsW8mOSO+pzFmT6uvU7XXdvM/Cp38zJkmTeMeKmTrmCMD5ToT79FmiE/mRWiYWcJYW4g==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.28.4", + "html-parse-stringify": "^3.0.1", + "use-sync-external-store": "^1.6.0" + }, + "peerDependencies": { + "i18next": ">= 25.6.2", + "react": ">= 16.8.0", + "typescript": "^5" + }, + "peerDependenciesMeta": { + 
"react-dom": { + "optional": true + }, + "react-native": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true, + "license": "MIT", + "peer": true + }, + "node_modules/react-refresh": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-remove-scroll": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", + "license": "MIT", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "license": "MIT", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + 
}, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "license": "MIT", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/readdirp/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/recast": { + "version": "0.23.11", + "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.11.tgz", + "integrity": 
"sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ast-types": "^0.16.1", + "esprima": "~4.0.0", + "source-map": "~0.6.1", + "tiny-invariant": "^1.3.3", + "tslib": "^2.0.1" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/recast/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redent/node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/relay-runtime": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/relay-runtime/-/relay-runtime-12.0.0.tgz", + "integrity": "sha512-QU6JKr1tMsry22DXNy9Whsq5rmvwr3LSZiiWV/9+DFpuTWvp+WFhobWMc8TC4OjKFfNhEZy7mOiqUAn5atQtug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.0.0", + "fbjs": "^3.0.0", + "invariant": "^2.2.4" + } + }, + "node_modules/remedial": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/remedial/-/remedial-1.0.8.tgz", + "integrity": 
"sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg==", + "dev": true, + "license": "(MIT OR Apache-2.0)", + "engines": { + "node": "*" + } + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", + "dev": true, + "license": "ISC" + }, + "node_modules/remove-trailing-spaces": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/remove-trailing-spaces/-/remove-trailing-spaces-1.0.9.tgz", + "integrity": "sha512-xzG7w5IRijvIkHIjDk65URsJJ7k4J95wmcArY5PRcmjldIOl7oTvG8+X2Ag690R7SfwiOcHrWZKVc1Pp5WIOzA==", + "dev": true, + "license": "MIT" + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true, + "license": "ISC" + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { 
+ "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/restore-cursor": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/rettime": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/rettime/-/rettime-0.10.1.tgz", + "integrity": "sha512-uyDrIlUEH37cinabq0AX4QbgV4HbFZ/gqoiunWQ1UqBtRvTTytwhNYjE++pO/MjPTZL5KQCf2bEoJ/BJNVQ5Kw==", + "dev": true, + "license": "MIT" + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": 
"sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "dev": true, + "license": "MIT" + }, + "node_modules/rimraf": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.1.2.tgz", + "integrity": "sha512-cFCkPslJv7BAXJsYlK1dZsbP8/ZNLkCAQ0bi1hf5EKX2QHegmDFEFA6QhuYJlk7UDdc+02JjO80YSOrWPpw06g==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "glob": "^13.0.0", + "package-json-from-dist": "^1.0.1" + }, + "bin": { + "rimraf": "dist/esm/bin.mjs" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rollup": { + "version": "4.53.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.5.tgz", + "integrity": "sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.53.5", + "@rollup/rollup-android-arm64": "4.53.5", + "@rollup/rollup-darwin-arm64": "4.53.5", + "@rollup/rollup-darwin-x64": "4.53.5", + "@rollup/rollup-freebsd-arm64": "4.53.5", + "@rollup/rollup-freebsd-x64": "4.53.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.53.5", + "@rollup/rollup-linux-arm-musleabihf": "4.53.5", + "@rollup/rollup-linux-arm64-gnu": "4.53.5", + "@rollup/rollup-linux-arm64-musl": "4.53.5", + "@rollup/rollup-linux-loong64-gnu": "4.53.5", + "@rollup/rollup-linux-ppc64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-gnu": "4.53.5", + "@rollup/rollup-linux-riscv64-musl": "4.53.5", + "@rollup/rollup-linux-s390x-gnu": "4.53.5", + "@rollup/rollup-linux-x64-gnu": "4.53.5", + "@rollup/rollup-linux-x64-musl": "4.53.5", + "@rollup/rollup-openharmony-arm64": "4.53.5", + "@rollup/rollup-win32-arm64-msvc": "4.53.5", + 
"@rollup/rollup-win32-ia32-msvc": "4.53.5", + "@rollup/rollup-win32-x64-gnu": "4.53.5", + "@rollup/rollup-win32-x64-msvc": "4.53.5", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-applescript": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", + "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-async": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-4.0.6.tgz", + "integrity": "sha512-IoDlSLTs3Yq593mb3ZoKWKXMNu3UpObxhgA/Xuid5p4bbfi2jdY1Hj0m1K+0/tEuQTxIGMhQDqGjKb7RuxGpAQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "license": "MIT" + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/sentence-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/sentence-case/-/sentence-case-3.0.4.tgz", + "integrity": "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/seroval": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.4.2.tgz", + "integrity": "sha512-N3HEHRCZYn3cQbsC4B5ldj9j+tHdf4JZoYPlcI4rRYu0Xy4qN8MQf1Z08EibzB0WpgRG5BGK08FTrmM66eSzKQ==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/seroval-plugins": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.4.0.tgz", + "integrity": "sha512-zir1aWzoiax6pbBVjoYVd0O1QQXgIL3eVGBMsBsNmM8Ukq90yGaWlfx0AB9dTS8GPqrOrbXn79vmItCUP9U3BQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "seroval": "^1.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": 
"sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true, + "license": "ISC" + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true, + "license": "MIT" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": 
"sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/signedsource": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/signedsource/-/signedsource-1.0.0.tgz", + "integrity": "sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz", + "integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/smol-toml": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.5.2.tgz", + "integrity": 
"sha512-QlaZEqcAH3/RtNyet1IPIYPsEWAaYyXXv1Krsi+1L/QHppjX4Ifm8MQsBISz9vE8cHicIq3clogsheili5vhaQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 18" + }, + "funding": { + "url": "https://github.com/sponsors/cyyynthia" + } + }, + "node_modules/snake-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", + "integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", + "dev": true, + "license": "MIT", + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/source-map": { + "version": "0.7.6", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", + "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 12" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sponge-case": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sponge-case/-/sponge-case-1.0.1.tgz", + "integrity": "sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/stdin-discarder": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.3.1.tgz", + "integrity": "sha512-reExS1kSGoElkextOcPkel4NE99S0BWxjUHQeDFnR8S993JxpPX7KU4MNmO19NXhlJp+8dmdCbKQVNgLJh2teA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/storybook": { + "version": "10.2.9", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-10.2.9.tgz", + "integrity": "sha512-DGok7XwIwdPWF+a49Yw+4madER5DZWRo9CdyySBLT3zeuxiEPt0Ua7ouJHm/y6ojnb/FVKZcQe8YmrE71s0qPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@storybook/icons": "^2.0.1", + "@testing-library/jest-dom": "^6.6.3", + "@testing-library/user-event": "^14.6.1", + "@vitest/expect": "3.2.4", + "@vitest/spy": "3.2.4", + "esbuild": "^0.18.0 || ^0.19.0 || ^0.20.0 || ^0.21.0 || ^0.22.0 || ^0.23.0 || ^0.24.0 || ^0.25.0 || ^0.26.0 || ^0.27.0", + "open": "^10.2.0", + "recast": "^0.23.5", + "semver": "^7.7.3", + "use-sync-external-store": "^1.5.0", + "ws": "^8.18.0" + }, + "bin": { + "storybook": "dist/bin/dispatcher.js" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "prettier": "^2 || ^3" + }, + "peerDependenciesMeta": { + "prettier": { + "optional": true + } + } + }, + 
"node_modules/storybook/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/strict-event-emitter": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz", + "integrity": "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/string-env-interpolation": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz", + "integrity": "sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-final-newline": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", + "integrity": "sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-indent": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.1.1.tgz", + "integrity": "sha512-SlyRoSkdh1dYP0PzclLE7r0M9sgbFKKMFXpFRUMNuKhQSbC6VQIGzq3E0qsfvGJaUFJPGv6Ws1NZ/haTAjfbMA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": 
{ + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/sucrase/node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/swagger-ui-dist": { + "version": "5.31.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-5.31.0.tgz", + "integrity": "sha512-zSUTIck02fSga6rc0RZP3b7J7wgHXwLea8ZjgLA3Vgnb8QeOl3Wou2/j5QkzSGeoz6HusP/coYuJl33aQxQZpg==", + "license": "Apache-2.0", + "dependencies": { + "@scarf/scarf": "=1.4.0" + } + }, + "node_modules/swap-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/swap-case/-/swap-case-2.0.2.tgz", + "integrity": "sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/sync-fetch": { 
+ "version": "0.6.0-2", + "resolved": "https://registry.npmjs.org/sync-fetch/-/sync-fetch-0.6.0-2.tgz", + "integrity": "sha512-c7AfkZ9udatCuAy9RSfiGPpeOKKUAUK5e1cXadLOGUjasdxqYqAK0jTNkM/FSEyJ3a5Ra27j/tw/PS0qLmaF/A==", + "dev": true, + "license": "MIT", + "dependencies": { + "node-fetch": "^3.3.2", + "timeout-signal": "^2.0.0", + "whatwg-mimetype": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/sync-fetch/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/sync-fetch/node_modules/whatwg-mimetype": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tabbable": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.3.0.tgz", + "integrity": "sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==", + "license": "MIT" + }, + "node_modules/tagged-tag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz", + "integrity": "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" 
+ } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tailwindcss/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/tailwindcss/node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/tailwindcss/node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": 
"sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/tailwindcss/node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/timeout-signal": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/timeout-signal/-/timeout-signal-2.0.0.tgz", + "integrity": "sha512-YBGpG4bWsHoPvofT6y/5iqulfXIiIErl5B0LdtHT1mGXDFTAhhRrbUpTvBgYbovr+3cKblya2WAOcpoy90XguA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + } + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": 
"sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, + "node_modules/tiny-warning": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", + "license": "MIT" + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyrainbow": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", + "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz", + "integrity": 
"sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/title-case": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/title-case/-/title-case-3.0.3.tgz", + "integrity": "sha512-e1zGYRvbffpcHIrnuqT0Dh+gEJtDaxDSoG4JAIpq4oDFyooziLBIiYQv0GBT4FUAnUop5uZ1hiIAj7oAF6sOCA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/tldts": { + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.19.tgz", + "integrity": "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tldts-core": "^7.0.19" + }, + "bin": { + "tldts": "bin/cli.js" + } + }, + "node_modules/tldts-core": { + "version": "7.0.19", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.19.tgz", + "integrity": "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A==", + "dev": true, + "license": "MIT" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tough-cookie": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", + "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tldts": "^7.0.5" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": 
"https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true, + "license": "MIT" + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.10" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/ts-log": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/ts-log/-/ts-log-2.2.7.tgz", + "integrity": "sha512-320x5Ggei84AxzlXp91QkIGSw5wgaLT6GeAH0KsqDmRZdVWW2OiSeVvElVoatk3f7nicwXlElXsoFkARiGE2yg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tsconfig-paths": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", + "integrity": "sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "json5": "^2.2.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": 
"sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/type-fest": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-5.3.1.tgz", + "integrity": "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "dependencies": { + "tagged-tag": "^1.0.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "devOptional": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ua-parser-js": { + "version": "1.0.41", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.41.tgz", + "integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "license": "MIT", + "bin": { + "ua-parser-js": "script/cli.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/unc-path-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", + "integrity": 
"sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unixify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unixify/-/unixify-1.0.0.tgz", + "integrity": "sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg==", + "dev": true, + "license": "MIT", + "dependencies": { + "normalize-path": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unixify/node_modules/normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unplugin": { + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-2.3.11.tgz", + "integrity": "sha512-5uKD0nqiYVzlmCRs01Fhs2BdkEgBS3SAVP6ndrBsuK42iC2+JHyxM05Rm9G8+5mkmRtzMZGY8Ct5+mliZxU/Ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", 
+ "acorn": "^8.15.0", + "picomatch": "^4.0.3", + "webpack-virtual-modules": "^0.6.2" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/until-async": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/until-async/-/until-async-3.0.2.tgz", + "integrity": "sha512-IiSk4HlzAMqTUseHHe3VhIGyuFmN90zMTpD3Z3y8jeQbzLIq500MVM7Jq2vUAnTKAFPJrqwkzr6PoTcPhGcOiw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/kettanaito" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.2.tgz", + "integrity": "sha512-E85pfNzMQ9jpKkA7+TJAi4TJN+tBCuWh5rUcS/sv6cFi+1q9LYDwDI5dpUL0u/73EElyQ8d3TEaeW4sPedBqYA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/upper-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/upper-case/-/upper-case-2.0.2.tgz", + "integrity": "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/upper-case-first": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/upper-case-first/-/upper-case-first-2.0.2.tgz", + "integrity": "sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/urlpattern-polyfill": { + 
"version": "10.1.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.1.0.tgz", + "integrity": "sha512-IGjKp/o0NL3Bso1PymYURCJxMPNAf/ILOpendP9f5B6e1rTJgdgiOvgfoT8VxCAdY+Wisb9uhGaJJf3yZ2V9nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "license": "MIT", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sync-external-store": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": 
"sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/valibot": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/valibot/-/valibot-1.2.0.tgz", + "integrity": "sha512-mm1rxUsmOxzrwnX5arGS+U4T25RdvpPjPN4yR0u9pUBov9+zGVtO84tif1eY4r6zWxVxu3KzIyknJy3rxfRZZg==", + "license": "MIT", + "peerDependencies": { + "typescript": ">=5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/vaul": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vaul/-/vaul-1.1.2.tgz", + "integrity": "sha512-ZFkClGpWyI2WUQjdLJ/BaGuV6AVQiJ3uELGk3OYtP+B6yCO7Cmn9vPFXVJkRaGkOJu3m8bQMgtyzNHixULceQA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-dialog": "^1.1.1" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + 
"optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-plugin-graphql-codegen": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/vite-plugin-graphql-codegen/-/vite-plugin-graphql-codegen-3.8.0.tgz", + "integrity": "sha512-VMmtlJD1G7lkwsYW/BDMwctRTLnK8izSoFkgVFBDaY+Fp8AVxZs3Q4TqVeUvRwllF37VFa+ae8ufou6G1+Kk3Q==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@graphql-codegen/cli": ">=1.0.0 <7.0.0", + "graphql": ">=14.0.0 <17.0.0", + "vite": ">=2.7.0 <9.0.0" + } + }, + "node_modules/vitest": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.18.tgz", + "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.0.18", + "@vitest/mocker": "4.0.18", + "@vitest/pretty-format": "4.0.18", + "@vitest/runner": "4.0.18", + "@vitest/snapshot": "4.0.18", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "es-module-lexer": "^1.7.0", + "expect-type": "^1.2.2", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": "^3.10.0", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.0.3", + "vite": "^6.0.0 || ^7.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + 
"@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.0.18", + "@vitest/browser-preview": "4.0.18", + "@vitest/browser-webdriverio": "4.0.18", + "@vitest/ui": "4.0.18", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/@vitest/expect": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.18.tgz", + "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.0.18", + "@vitest/utils": "4.0.18", + "chai": "^6.2.1", + "tinyrainbow": "^3.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest/node_modules/@vitest/mocker": { + "version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.18.tgz", + "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.0.18", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/@vitest/spy": { + 
"version": "4.0.18", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.18.tgz", + "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest/node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/vitest/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/void-elements": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/walk-up-path": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-4.0.0.tgz", + "integrity": "sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==", + "dev": true, + "license": "ISC", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } 
+ }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/webpack-virtual-modules": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", + "integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/whatwg-mimetype": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", + "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-module": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", + "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", + 
"dev": true, + "license": "ISC" + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + 
"is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/ws": { + "version": "8.18.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/wsl-utils": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.1.0.tgz", + "integrity": "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-wsl": "^3.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + 
"node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/yargs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoctocolors": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz", + "integrity": "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yoctocolors-cjs": { + "version": "2.1.3", + "resolved": 
"https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz", + "integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/matrix-authentication-service/frontend/package.json b/matrix-authentication-service/frontend/package.json new file mode 100644 index 00000000..4e10f1ae --- /dev/null +++ b/matrix-authentication-service/frontend/package.json @@ -0,0 +1,88 @@ +{ + "name": "mas-frontend", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "generate": "graphql-codegen && i18next-cli extract", + "lint": "graphql-codegen && biome check && tsc && i18next-cli extract --ci", + "format": "biome format --write", + "build": "rimraf ./dist/ && vite build", + "preview": "vite preview", + "test": "vitest", + "coverage": "vitest run --coverage", + "storybook": "storybook dev -p 6006", + "build-storybook": "storybook build", + "i18n": "i18next-cli", + "knip": "knip" + }, + "dependencies": { + "@fontsource/inconsolata": "^5.2.8", + "@fontsource/inter": "^5.2.8", + "@radix-ui/react-collapsible": "^1.1.12", + "@radix-ui/react-dialog": "^1.1.15", + "@tanstack/react-query": "^5.90.21", + "@tanstack/react-router": "^1.150.0", + "@vector-im/compound-design-tokens": "6.4.3", + "@vector-im/compound-web": "^8.3.5", + "@zxcvbn-ts/core": "^3.0.4", + "@zxcvbn-ts/language-common": "^3.0.4", + "classnames": "^2.5.1", + "date-fns": "^4.1.0", + "i18next": "^25.8.10", + "react": "^19.2.4", + 
"react-dom": "^19.2.4", + "react-i18next": "^16.5.4", + "swagger-ui-dist": "^5.31.0", + "valibot": "^1.2.0", + "vaul": "^1.1.2" + }, + "devDependencies": { + "@biomejs/biome": "^2.3.9", + "@browser-logos/chrome": "^2.0.0", + "@browser-logos/firefox": "^3.0.10", + "@browser-logos/safari": "^2.1.0", + "@graphql-codegen/cli": "^6.1.1", + "@graphql-codegen/client-preset": "^5.2.2", + "@graphql-codegen/typescript-msw": "^3.0.1", + "@storybook/addon-docs": "^10.2.9", + "@storybook/react-vite": "^10.2.9", + "@tanstack/react-query-devtools": "^5.91.3", + "@tanstack/react-router-devtools": "^1.150.0", + "@tanstack/router-plugin": "^1.150.0", + "@testing-library/jest-dom": "^6.9.1", + "@testing-library/react": "^16.3.1", + "@testing-library/user-event": "^14.6.1", + "@types/node": "^25.2.3", + "@types/react": "19.2.14", + "@types/react-dom": "19.2.3", + "@types/swagger-ui-dist": "^3.30.6", + "@vitejs/plugin-react": "^5.1.4", + "@vitest/coverage-v8": "^4.0.18", + "autoprefixer": "^10.4.23", + "browserslist-to-esbuild": "^2.1.1", + "graphql": "^16.12.0", + "happy-dom": "^20.5.0", + "i18next-cli": "^1.42.9", + "knip": "^5.81.0", + "msw": "^2.12.8", + "msw-storybook-addon": "^2.0.6", + "postcss": "^8.5.6", + "postcss-import": "^16.1.1", + "postcss-nesting": "^14.0.0", + "rimraf": "^6.1.2", + "storybook": "^10.1.11", + "tailwindcss": "^3.4.19", + "tinyglobby": "^0.2.15", + "typescript": "^5.9.3", + "vite": "7.3.1", + "vite-plugin-graphql-codegen": "^3.8.0", + "vitest": "^4.0.15" + }, + "msw": { + "workerDirectory": [ + ".storybook/public" + ] + } +} diff --git a/matrix-authentication-service/frontend/schema.graphql b/matrix-authentication-service/frontend/schema.graphql new file mode 100644 index 00000000..99da3201 --- /dev/null +++ b/matrix-authentication-service/frontend/schema.graphql @@ -0,0 +1,2491 @@ +""" +The input for the `addEmail` mutation +""" +input AddEmailInput { + """ + The email address to add + """ + email: String! 
+ """ + The ID of the user to add the email address to + """ + userId: ID! + """ + Skip the email address verification. Only allowed for admins. + """ + skipVerification: Boolean + """ + Skip the email address policy check. Only allowed for admins. + """ + skipPolicyCheck: Boolean +} + +""" +The payload of the `addEmail` mutation +""" +type AddEmailPayload { + """ + Status of the operation + """ + status: AddEmailStatus! + """ + The email address that was added + """ + email: UserEmail + """ + The user to whom the email address was added + """ + user: User + """ + The list of policy violations if the email address was denied + """ + violations: [String!] +} + +""" +The status of the `addEmail` mutation +""" +enum AddEmailStatus { + """ + The email address was added + """ + ADDED + """ + The email address already exists + """ + EXISTS + """ + The email address is invalid + """ + INVALID + """ + The email address is not allowed by the policy + """ + DENIED +} + +""" +The input for the `addUser` mutation. +""" +input AddUserInput { + """ + The username of the user to add. + """ + username: String! + """ + Skip checking with the homeserver whether the username is valid. + + Use this with caution! The main reason to use this, is when a user used + by an application service needs to exist in MAS to craft special + tokens (like with admin access) for them + """ + skipHomeserverCheck: Boolean +} + +""" +The payload for the `addUser` mutation. +""" +type AddUserPayload { + """ + Status of the operation + """ + status: AddUserStatus! + """ + The user that was added. + """ + user: User +} + +""" +The status of the `addUser` mutation. +""" +enum AddUserStatus { + """ + The user was added. + """ + ADDED + """ + The user already exists. + """ + EXISTS + """ + The username is reserved. + """ + RESERVED + """ + The username is invalid. + """ + INVALID +} + +""" +The input for the `allowUserCrossSigningReset` mutation. 
+""" +input AllowUserCrossSigningResetInput { + """ + The ID of the user to update. + """ + userId: ID! +} + +""" +The payload for the `allowUserCrossSigningReset` mutation. +""" +type AllowUserCrossSigningResetPayload { + """ + The user that was updated. + """ + user: User +} + +type Anonymous implements Node { + id: ID! +} + +""" +A session in an application, either a compatibility or an OAuth 2.0 one +""" +union AppSession = CompatSession | Oauth2Session + +type AppSessionConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [AppSessionEdge!]! + """ + A list of nodes. + """ + nodes: [AppSession!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type AppSessionEdge { + """ + The item at the end of the edge + """ + node: AppSession! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +An authentication records when a user enter their credential in a browser +session. +""" +type Authentication implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! +} + +""" +A browser session represents a logged in user in a browser. +""" +type BrowserSession implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + The user logged in this session. + """ + user: User! + """ + The most recent authentication of this session. + """ + lastAuthentication: Authentication + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the session was finished. + """ + finishedAt: DateTime + """ + The state of the session. + """ + state: SessionState! + """ + The user-agent with which the session was created. + """ + userAgent: UserAgent + """ + The last IP address used by the session. + """ + lastActiveIp: String + """ + The last time the session was active. 
+ """ + lastActiveAt: DateTime + """ + Get the list of both compat and OAuth 2.0 sessions started by this + browser session, chronologically sorted + """ + appSessions( + """ + List only sessions in the given state. + """ + state: SessionState + """ + List only sessions for the given device. + """ + device: String + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): AppSessionConnection! +} + +type BrowserSessionConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [BrowserSessionEdge!]! + """ + A list of nodes. + """ + nodes: [BrowserSession!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type BrowserSessionEdge { + """ + The item at the end of the edge + """ + node: BrowserSession! + """ + A cursor for use in pagination + """ + cursor: String! +} + +type CaptchaConfig { + """ + Which Captcha service is being used + """ + service: CaptchaService! + """ + The site key used by the instance + """ + siteKey: String! + id: ID! +} + +""" +Which Captcha service is being used +""" +enum CaptchaService { + RECAPTCHA_V2 + CLOUDFLARE_TURNSTILE + H_CAPTCHA +} + +""" +A compat session represents a client session which used the legacy Matrix +login API. +""" +type CompatSession implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + The user authorized for this session. + """ + user: User! + """ + The Matrix Device ID of this session. + """ + deviceId: String + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the session ended. 
+ """ + finishedAt: DateTime + """ + The user-agent with which the session was created. + """ + userAgent: UserAgent + """ + The associated SSO login, if any. + """ + ssoLogin: CompatSsoLogin + """ + The browser session which started this session, if any. + """ + browserSession: BrowserSession + """ + The state of the session. + """ + state: SessionState! + """ + The last IP address used by the session. + """ + lastActiveIp: String + """ + The last time the session was active. + """ + lastActiveAt: DateTime + """ + A human-provided name for the session. + """ + humanName: String +} + +type CompatSessionConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [CompatSessionEdge!]! + """ + A list of nodes. + """ + nodes: [CompatSession!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type CompatSessionEdge { + """ + The item at the end of the edge + """ + node: CompatSession! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +The type of a compatibility session. +""" +enum CompatSessionType { + """ + The session was created by a SSO login. + """ + SSO_LOGIN + """ + The session was created by an unknown method. + """ + UNKNOWN +} + +""" +A compat SSO login represents a login done through the legacy Matrix login +API, via the `m.login.sso` login method. +""" +type CompatSsoLogin implements Node { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! + """ + The redirect URI used during the login. + """ + redirectUri: Url! + """ + When the login was fulfilled, and the user was redirected back to the + client. + """ + fulfilledAt: DateTime + """ + When the client exchanged the login token sent during the redirection. + """ + exchangedAt: DateTime + """ + The compat session which was started by this login. 
+ """ + session: CompatSession +} + +type CompatSsoLoginConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [CompatSsoLoginEdge!]! + """ + A list of nodes. + """ + nodes: [CompatSsoLogin!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type CompatSsoLoginEdge { + """ + The item at the end of the edge + """ + node: CompatSsoLogin! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +The input for the `completeEmailAuthentication` mutation +""" +input CompleteEmailAuthenticationInput { + """ + The authentication code to use + """ + code: String! + """ + The ID of the authentication session to complete + """ + id: ID! +} + +""" +The payload of the `completeEmailAuthentication` mutation +""" +type CompleteEmailAuthenticationPayload { + """ + Status of the operation + """ + status: CompleteEmailAuthenticationStatus! +} + +""" +The status of the `completeEmailAuthentication` mutation +""" +enum CompleteEmailAuthenticationStatus { + """ + The authentication was completed + """ + COMPLETED + """ + The authentication code is invalid + """ + INVALID_CODE + """ + The authentication code has expired + """ + CODE_EXPIRED + """ + Too many attempts to complete an email authentication + """ + RATE_LIMITED + """ + The email address is already in use + """ + IN_USE +} + +""" +The input of the `createOauth2Session` mutation. +""" +input CreateOAuth2SessionInput { + """ + The scope of the session + """ + scope: String! + """ + The ID of the user for which to create the session + """ + userId: ID! + """ + Whether the session should issue a never-expiring access token + """ + permanent: Boolean +} + +""" +The payload of the `createOauth2Session` mutation. +""" +type CreateOAuth2SessionPayload { + """ + Access token for this session + """ + accessToken: String! 
+ """ + Refresh token for this session, if it is not a permanent session + """ + refreshToken: String + """ + The OAuth 2.0 session which was just created + """ + oauth2Session: Oauth2Session! +} + +""" +An object with a creation date. +""" +interface CreationEvent { + """ + When the object was created. + """ + createdAt: DateTime! +} + +""" +A filter for dates, with a lower bound and an upper bound +""" +input DateFilter { + """ + The lower bound of the date range + """ + after: DateTime + """ + The upper bound of the date range + """ + before: DateTime +} + +""" +Implement the DateTime scalar + +The input/output is a string in RFC3339 format. +""" +scalar DateTime + +""" +The input for the `deactivateUser` mutation. +""" +input DeactivateUserInput { + """ + Whether to ask the homeserver to GDPR-erase the user + + This is equivalent to the `erase` parameter on the + `/_matrix/client/v3/account/deactivate` C-S API, which is + implementation-specific. + + What Synapse does is documented here: + + """ + hsErase: Boolean! + """ + The password of the user to deactivate. + """ + password: String +} + +""" +The payload for the `deactivateUser` mutation. +""" +type DeactivateUserPayload { + """ + Status of the operation + """ + status: DeactivateUserStatus! + user: User +} + +""" +The status of the `deactivateUser` mutation. +""" +enum DeactivateUserStatus { + """ + The user was deactivated. + """ + DEACTIVATED + """ + The password was wrong. + """ + INCORRECT_PASSWORD +} + +""" +The type of a user agent +""" +enum DeviceType { + """ + A personal computer, laptop or desktop + """ + PC + """ + A mobile phone. Can also sometimes be a tablet. + """ + MOBILE + """ + A tablet + """ + TABLET + """ + Unknown device type + """ + UNKNOWN +} + +""" +The input of the `endBrowserSession` mutation. +""" +input EndBrowserSessionInput { + """ + The ID of the session to end. + """ + browserSessionId: ID! +} + +type EndBrowserSessionPayload { + """ + The status of the mutation. 
+ """ + status: EndBrowserSessionStatus! + """ + Returns the ended session. + """ + browserSession: BrowserSession +} + +""" +The status of the `endBrowserSession` mutation. +""" +enum EndBrowserSessionStatus { + """ + The session was ended. + """ + ENDED + """ + The session was not found. + """ + NOT_FOUND +} + +""" +The input of the `endCompatSession` mutation. +""" +input EndCompatSessionInput { + """ + The ID of the session to end. + """ + compatSessionId: ID! +} + +type EndCompatSessionPayload { + """ + The status of the mutation. + """ + status: EndCompatSessionStatus! + """ + Returns the ended session. + """ + compatSession: CompatSession +} + +""" +The status of the `endCompatSession` mutation. +""" +enum EndCompatSessionStatus { + """ + The session was ended. + """ + ENDED + """ + The session was not found. + """ + NOT_FOUND +} + +""" +The input of the `endOauth2Session` mutation. +""" +input EndOAuth2SessionInput { + """ + The ID of the session to end. + """ + oauth2SessionId: ID! +} + +type EndOAuth2SessionPayload { + """ + The status of the mutation. + """ + status: EndOAuth2SessionStatus! + """ + Returns the ended session. + """ + oauth2Session: Oauth2Session +} + +""" +The status of the `endOauth2Session` mutation. +""" +enum EndOAuth2SessionStatus { + """ + The session was ended. + """ + ENDED + """ + The session was not found. + """ + NOT_FOUND +} + +""" +The input for the `lockUser` mutation. +""" +input LockUserInput { + """ + The ID of the user to lock. + """ + userId: ID! + """ + Permanently lock the user. + """ + deactivate: Boolean +} + +""" +The payload for the `lockUser` mutation. +""" +type LockUserPayload { + """ + Status of the operation + """ + status: LockUserStatus! + """ + The user that was locked. + """ + user: User +} + +""" +The status of the `lockUser` mutation. +""" +enum LockUserStatus { + """ + The user was locked. + """ + LOCKED + """ + The user was not found. 
+ """ + NOT_FOUND +} + +type MatrixUser { + """ + The Matrix ID of the user. + """ + mxid: String! + """ + The display name of the user, if any. + """ + displayName: String + """ + The avatar URL of the user, if any. + """ + avatarUrl: String + """ + Whether the user is deactivated on the homeserver. + """ + deactivated: Boolean! +} + +""" +The mutations root of the GraphQL interface. +""" +type Mutation { + """ + Add an email address to the specified user + """ + addEmail(input: AddEmailInput!): AddEmailPayload! + @deprecated(reason: "Use `startEmailAuthentication` instead.") + """ + Remove an email address + """ + removeEmail(input: RemoveEmailInput!): RemoveEmailPayload! + """ + Set an email address as primary + """ + setPrimaryEmail(input: SetPrimaryEmailInput!): SetPrimaryEmailPayload! + @deprecated( + reason: "This doesn't do anything anymore, but is kept to avoid breaking existing queries" + ) + """ + Start a new email authentication flow + """ + startEmailAuthentication( + input: StartEmailAuthenticationInput! + ): StartEmailAuthenticationPayload! + """ + Resend the email authentication code + """ + resendEmailAuthenticationCode( + input: ResendEmailAuthenticationCodeInput! + ): ResendEmailAuthenticationCodePayload! + """ + Complete the email authentication flow + """ + completeEmailAuthentication( + input: CompleteEmailAuthenticationInput! + ): CompleteEmailAuthenticationPayload! + """ + Add a user. This is only available to administrators. + """ + addUser(input: AddUserInput!): AddUserPayload! + """ + Lock a user. This is only available to administrators. + """ + lockUser(input: LockUserInput!): LockUserPayload! + """ + Unlock and reactivate a user. This is only available to administrators. + """ + unlockUser(input: UnlockUserInput!): UnlockUserPayload! + """ + Set whether a user can request admin. This is only available to + administrators. + """ + setCanRequestAdmin( + input: SetCanRequestAdminInput! + ): SetCanRequestAdminPayload! 
+ """ + Temporarily allow user to reset their cross-signing keys. + """ + allowUserCrossSigningReset( + input: AllowUserCrossSigningResetInput! + ): AllowUserCrossSigningResetPayload! + """ + Set the password for a user. + + This can be used by server administrators to set any user's password, + or, provided the capability hasn't been disabled on this server, + by a user to change their own password as long as they know their + current password. + """ + setPassword(input: SetPasswordInput!): SetPasswordPayload! + """ + Set the password for yourself, using a recovery ticket sent by e-mail. + """ + setPasswordByRecovery(input: SetPasswordByRecoveryInput!): SetPasswordPayload! + """ + Resend a user recovery email + + This is used when a user opens a recovery link that has expired. In this + case, we display a link for them to get a new recovery email, which + calls this mutation. + """ + resendRecoveryEmail( + input: ResendRecoveryEmailInput! + ): ResendRecoveryEmailPayload! + """ + Deactivate the current user account + + If the user has a password, it *must* be supplied in the `password` + field. + """ + deactivateUser(input: DeactivateUserInput!): DeactivateUserPayload! + """ + Create a new arbitrary OAuth 2.0 Session. + + Only available for administrators. + """ + createOauth2Session( + input: CreateOAuth2SessionInput! + ): CreateOAuth2SessionPayload! + endOauth2Session(input: EndOAuth2SessionInput!): EndOAuth2SessionPayload! + setOauth2SessionName( + input: SetOAuth2SessionNameInput! + ): SetOAuth2SessionNamePayload! + endCompatSession(input: EndCompatSessionInput!): EndCompatSessionPayload! + setCompatSessionName( + input: SetCompatSessionNameInput! + ): SetCompatSessionNamePayload! + endBrowserSession(input: EndBrowserSessionInput!): EndBrowserSessionPayload! + """ + Set the display name of a user + """ + setDisplayName(input: SetDisplayNameInput!): SetDisplayNamePayload! +} + +""" +An object with an ID. +""" +interface Node { + """ + ID of the object. 
+ """ + id: ID! +} + +""" +The application type advertised by the client. +""" +enum Oauth2ApplicationType { + """ + Client is a web application. + """ + WEB + """ + Client is a native application. + """ + NATIVE +} + +""" +An OAuth 2.0 client +""" +type Oauth2Client implements Node { + """ + ID of the object. + """ + id: ID! + """ + OAuth 2.0 client ID + """ + clientId: String! + """ + Client name advertised by the client. + """ + clientName: String + """ + Client URI advertised by the client. + """ + clientUri: Url + """ + Logo URI advertised by the client. + """ + logoUri: Url + """ + Terms of services URI advertised by the client. + """ + tosUri: Url + """ + Privacy policy URI advertised by the client. + """ + policyUri: Url + """ + List of redirect URIs used for authorization grants by the client. + """ + redirectUris: [Url!]! + """ + The application type advertised by the client. + """ + applicationType: Oauth2ApplicationType +} + +""" +An OAuth 2.0 session represents a client session which used the OAuth APIs +to login. +""" +type Oauth2Session implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + OAuth 2.0 client used by this session. + """ + client: Oauth2Client! + """ + Scope granted for this session. + """ + scope: String! + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the session ended. + """ + finishedAt: DateTime + """ + The user-agent with which the session was created. + """ + userAgent: UserAgent + """ + The state of the session. + """ + state: SessionState! + """ + The browser session which started this OAuth 2.0 session. + """ + browserSession: BrowserSession + """ + User authorized for this session. + """ + user: User + """ + The last IP address used by the session. + """ + lastActiveIp: String + """ + The last time the session was active. + """ + lastActiveAt: DateTime + """ + The user-provided name for this session. 
+ """ + humanName: String +} + +type Oauth2SessionConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [Oauth2SessionEdge!]! + """ + A list of nodes. + """ + nodes: [Oauth2Session!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type Oauth2SessionEdge { + """ + The item at the end of the edge + """ + node: Oauth2Session! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +Information about pagination in a connection +""" +type PageInfo { + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + When paginating backwards, the cursor to continue. + """ + startCursor: String + """ + When paginating forwards, the cursor to continue. + """ + endCursor: String +} + +""" +The query root of the GraphQL interface. +""" +type Query { + """ + Get the current logged in browser session + """ + currentBrowserSession: BrowserSession + @deprecated(reason: "Use `viewerSession` instead.") + """ + Get the current logged in user + """ + currentUser: User @deprecated(reason: "Use `viewer` instead.") + """ + Fetch an OAuth 2.0 client by its ID. + """ + oauth2Client(id: ID!): Oauth2Client + """ + Fetch a browser session by its ID. + """ + browserSession(id: ID!): BrowserSession + """ + Fetch a compatible session by its ID. + """ + compatSession(id: ID!): CompatSession + """ + Fetch an OAuth 2.0 session by its ID. + """ + oauth2Session(id: ID!): Oauth2Session + """ + Fetch a user email by its ID. + """ + userEmail(id: ID!): UserEmail + """ + Fetch a user recovery ticket. + """ + userRecoveryTicket(ticket: String!): UserRecoveryTicket + """ + Fetch a user email authentication session + """ + userEmailAuthentication(id: ID!): UserEmailAuthentication + """ + Fetches an object given its ID. 
+ """ + node(id: ID!): Node + """ + Get the current site configuration + """ + siteConfig: SiteConfig! + """ + Fetch a user by its ID. + """ + user(id: ID!): User + """ + Fetch a user by its username. + """ + userByUsername(username: String!): User + """ + Get a list of users. + + This is only available to administrators. + """ + users( + """ + List only users with the given state. + """ + state: UserState + """ + List only users with the given 'canRequestAdmin' value + """ + canRequestAdmin: Boolean + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): UserConnection! + """ + Fetch an upstream OAuth 2.0 link by its ID. + """ + upstreamOauth2Link(id: ID!): UpstreamOAuth2Link + """ + Fetch an upstream OAuth 2.0 provider by its ID. + """ + upstreamOauth2Provider(id: ID!): UpstreamOAuth2Provider + """ + Get a list of upstream OAuth 2.0 providers. + """ + upstreamOauth2Providers( + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): UpstreamOAuth2ProviderConnection! + """ + Lookup a compat or OAuth 2.0 session + """ + session(userId: ID!, deviceId: String!): Session + """ + Get the viewer + """ + viewer: Viewer! + """ + Get the viewer's session + """ + viewerSession: ViewerSession! +} + +""" +The input for the `removeEmail` mutation +""" +input RemoveEmailInput { + """ + The ID of the email address to remove + """ + userEmailId: ID! + """ + The user's current password. 
This is required if the user is not an + admin and it has a password on its account. + """ + password: String +} + +""" +The payload of the `removeEmail` mutation +""" +type RemoveEmailPayload { + """ + Status of the operation + """ + status: RemoveEmailStatus! + """ + The email address that was removed + """ + email: UserEmail + """ + The user to whom the email address belonged + """ + user: User +} + +""" +The status of the `removeEmail` mutation +""" +enum RemoveEmailStatus { + """ + The email address was removed + """ + REMOVED + """ + The email address was not found + """ + NOT_FOUND + """ + The password provided is incorrect + """ + INCORRECT_PASSWORD +} + +""" +The input for the `resendEmailAuthenticationCode` mutation +""" +input ResendEmailAuthenticationCodeInput { + """ + The ID of the authentication session to resend the code for + """ + id: ID! + """ + The language to use for the email + """ + language: String! = "en" +} + +""" +The payload of the `resendEmailAuthenticationCode` mutation +""" +type ResendEmailAuthenticationCodePayload { + """ + Status of the operation + """ + status: ResendEmailAuthenticationCodeStatus! +} + +""" +The status of the `resendEmailAuthenticationCode` mutation +""" +enum ResendEmailAuthenticationCodeStatus { + """ + The email was resent + """ + RESENT + """ + The email authentication session is already completed + """ + COMPLETED + """ + Too many attempts to resend an email authentication code + """ + RATE_LIMITED +} + +""" +The input for the `resendRecoveryEmail` mutation. +""" +input ResendRecoveryEmailInput { + """ + The recovery ticket to use. + """ + ticket: String! +} + +""" +The return type for the `resendRecoveryEmail` mutation. +""" +type ResendRecoveryEmailPayload { + """ + Status of the operation + """ + status: ResendRecoveryEmailStatus! + """ + URL to continue the recovery process + """ + progressUrl: Url +} + +""" +The status of the `resendRecoveryEmail` mutation. 
+""" +enum ResendRecoveryEmailStatus { + """ + The recovery ticket was not found. + """ + NO_SUCH_RECOVERY_TICKET + """ + The rate limit was exceeded. + """ + RATE_LIMITED + """ + The recovery email was sent. + """ + SENT +} + +""" +A client session, either compat or OAuth 2.0 +""" +union Session = CompatSession | Oauth2Session + +""" +The state of a session +""" +enum SessionState { + """ + The session is active. + """ + ACTIVE + """ + The session is no longer active. + """ + FINISHED +} + +""" +The input for the `setCanRequestAdmin` mutation. +""" +input SetCanRequestAdminInput { + """ + The ID of the user to update. + """ + userId: ID! + """ + Whether the user can request admin. + """ + canRequestAdmin: Boolean! +} + +""" +The payload for the `setCanRequestAdmin` mutation. +""" +type SetCanRequestAdminPayload { + """ + The user that was updated. + """ + user: User +} + +""" +The input of the `setCompatSessionName` mutation. +""" +input SetCompatSessionNameInput { + """ + The ID of the session to set the name of. + """ + compatSessionId: ID! + """ + The new name of the session. + """ + humanName: String! +} + +type SetCompatSessionNamePayload { + """ + The status of the mutation. + """ + status: SetCompatSessionNameStatus! + """ + The session that was updated. + """ + oauth2Session: CompatSession +} + +""" +The status of the `setCompatSessionName` mutation. +""" +enum SetCompatSessionNameStatus { + """ + The session was updated. + """ + UPDATED + """ + The session was not found. + """ + NOT_FOUND +} + +""" +The input for the `setDisplayName` mutation +""" +input SetDisplayNameInput { + """ + The ID of the user to set the display name for + """ + userId: ID! + """ + The display name to set. If `None`, the display name will be removed. + """ + displayName: String +} + +""" +The payload of the `setDisplayName` mutation +""" +type SetDisplayNamePayload { + """ + Status of the operation + """ + status: SetDisplayNameStatus!
+ """ + The user that was updated + """ + user: User +} + +""" +The status of the `setDisplayName` mutation +""" +enum SetDisplayNameStatus { + """ + The display name was set + """ + SET + """ + The display name is invalid + """ + INVALID +} + +""" +The input of the `setOauth2SessionName` mutation. +""" +input SetOAuth2SessionNameInput { + """ + The ID of the session to set the name of. + """ + oauth2SessionId: ID! + """ + The new name of the session. + """ + humanName: String! +} + +type SetOAuth2SessionNamePayload { + """ + The status of the mutation. + """ + status: SetOAuth2SessionNameStatus! + """ + The session that was updated. + """ + oauth2Session: Oauth2Session +} + +""" +The status of the `setOauth2SessionName` mutation. +""" +enum SetOAuth2SessionNameStatus { + """ + The session was updated. + """ + UPDATED + """ + The session was not found. + """ + NOT_FOUND +} + +""" +The input for the `setPasswordByRecovery` mutation. +""" +input SetPasswordByRecoveryInput { + """ + The recovery ticket to use. + This identifies the user as well as proving authorisation to perform the + recovery operation. + """ + ticket: String! + """ + The new password for the user. + """ + newPassword: String! +} + +""" +The input for the `setPassword` mutation. +""" +input SetPasswordInput { + """ + The ID of the user to set the password for. + If you are not a server administrator then this must be your own user + ID. + """ + userId: ID! + """ + The current password of the user. + Required if you are not a server administrator. + """ + currentPassword: String + """ + The new password for the user. + """ + newPassword: String! +} + +""" +The return type for the `setPassword` mutation. +""" +type SetPasswordPayload { + """ + Status of the operation + """ + status: SetPasswordStatus! +} + +""" +The status of the `setPassword` mutation. +""" +enum SetPasswordStatus { + """ + The password was updated. + """ + ALLOWED + """ + The user was not found. 
+ """ + NOT_FOUND + """ + The user doesn't have a current password to attempt to match against. + """ + NO_CURRENT_PASSWORD + """ + The supplied current password was wrong. + """ + WRONG_PASSWORD + """ + The new password is invalid. For example, it may not meet configured + security requirements. + """ + INVALID_NEW_PASSWORD + """ + You aren't allowed to set the password for that user. + This happens if you aren't setting your own password and you aren't a + server administrator. + """ + NOT_ALLOWED + """ + Password support has been disabled. + This usually means that login is handled by an upstream identity + provider. + """ + PASSWORD_CHANGES_DISABLED + """ + The specified recovery ticket does not exist. + """ + NO_SUCH_RECOVERY_TICKET + """ + The specified recovery ticket has already been used and cannot be used + again. + """ + RECOVERY_TICKET_ALREADY_USED + """ + The specified recovery ticket has expired. + """ + EXPIRED_RECOVERY_TICKET + """ + Your account is locked and you can't change its password. + """ + ACCOUNT_LOCKED +} + +""" +The input for the `setPrimaryEmail` mutation +""" +input SetPrimaryEmailInput { + """ + The ID of the email address to set as primary + """ + userEmailId: ID! +} + +""" +The payload of the `setPrimaryEmail` mutation +""" +type SetPrimaryEmailPayload { + status: SetPrimaryEmailStatus! + """ + The user to whom the email address belongs + """ + user: User +} + +""" +The status of the `setPrimaryEmail` mutation +""" +enum SetPrimaryEmailStatus { + """ + The email address was set as primary + """ + SET + """ + The email address was not found + """ + NOT_FOUND + """ + Can't make an unverified email address primary + """ + UNVERIFIED +} + +type SiteConfig implements Node { + """ + The configuration of CAPTCHA provider. + """ + captchaConfig: CaptchaConfig + """ + The server name of the homeserver. + """ + serverName: String! + """ + The URL to the privacy policy. + """ + policyUri: Url + """ + The URL to the terms of service. 
+ """ + tosUri: Url + """ + Imprint to show in the footer. + """ + imprint: String + """ + Whether users can change their email. + """ + emailChangeAllowed: Boolean! + """ + Whether users can change their display name. + """ + displayNameChangeAllowed: Boolean! + """ + Whether passwords are enabled for login. + """ + passwordLoginEnabled: Boolean! + """ + Whether passwords are enabled and users can change their own passwords. + """ + passwordChangeAllowed: Boolean! + """ + Whether passwords are enabled and users can register using a password. + """ + passwordRegistrationEnabled: Boolean! + """ + Whether users can delete their own account. + """ + accountDeactivationAllowed: Boolean! + """ + Minimum password complexity, from 0 to 4, in terms of a zxcvbn score. + The exact scorer (including dictionaries and other data tables) + in use is <https://crates.io/crates/zxcvbn>. + """ + minimumPasswordComplexity: Int! + """ + Whether users can log in with their email address. + """ + loginWithEmailAllowed: Boolean! + """ + Experimental plan management iframe URI. + """ + planManagementIframeUri: String + """ + The ID of the site configuration. + """ + id: ID! +} + +""" +The input for the `startEmailAuthentication` mutation +""" +input StartEmailAuthenticationInput { + """ + The email address to add to the account + """ + email: String! + """ + The user's current password. This is required if the user has a password + on its account. + """ + password: String + """ + The language to use for the email + """ + language: String! = "en" +} + +""" +The payload of the `startEmailAuthentication` mutation +""" +type StartEmailAuthenticationPayload { + """ + Status of the operation + """ + status: StartEmailAuthenticationStatus! + """ + The email authentication session that was started + """ + authentication: UserEmailAuthentication + """ + The list of policy violations if the email address was denied + """ + violations: [String!]
+} + +""" +The status of the `startEmailAuthentication` mutation +""" +enum StartEmailAuthenticationStatus { + """ + The email address was started + """ + STARTED + """ + The email address is invalid + """ + INVALID_EMAIL_ADDRESS + """ + Too many attempts to start an email authentication + """ + RATE_LIMITED + """ + The email address isn't allowed by the policy + """ + DENIED + """ + The email address is already in use on this account + """ + IN_USE + """ + The password provided is incorrect + """ + INCORRECT_PASSWORD +} + +""" +The input for the `unlockUser` mutation. +""" +input UnlockUserInput { + """ + The ID of the user to unlock + """ + userId: ID! +} + +""" +The payload for the `unlockUser` mutation. +""" +type UnlockUserPayload { + """ + Status of the operation + """ + status: UnlockUserStatus! + """ + The user that was unlocked. + """ + user: User +} + +""" +The status of the `unlockUser` mutation. +""" +enum UnlockUserStatus { + """ + The user was unlocked. + """ + UNLOCKED + """ + The user was not found. + """ + NOT_FOUND +} + +type UpstreamOAuth2Link implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! + """ + Subject used for linking + """ + subject: String! + """ + A human-readable name for the link subject. + """ + humanAccountName: String + """ + The provider for which this link is. + """ + provider: UpstreamOAuth2Provider! + """ + The user to which this link is associated. + """ + user: User +} + +type UpstreamOAuth2LinkConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [UpstreamOAuth2LinkEdge!]! + """ + A list of nodes. + """ + nodes: [UpstreamOAuth2Link!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type UpstreamOAuth2LinkEdge { + """ + The item at the end of the edge + """ + node: UpstreamOAuth2Link! 
+ """ + A cursor for use in pagination + """ + cursor: String! +} + +type UpstreamOAuth2Provider implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! + """ + OpenID Connect issuer URL. + """ + issuer: String + """ + Client ID used for this provider. + """ + clientId: String! + """ + A human-readable name for this provider. + """ + humanName: String + """ + A brand identifier for this provider. + + One of `google`, `github`, `gitlab`, `apple` or `facebook`. + """ + brandName: String + """ + URL to start the linking process of the current user with this provider. + """ + linkUrl: Url! +} + +type UpstreamOAuth2ProviderConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [UpstreamOAuth2ProviderEdge!]! + """ + A list of nodes. + """ + nodes: [UpstreamOAuth2Provider!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type UpstreamOAuth2ProviderEdge { + """ + The item at the end of the edge + """ + node: UpstreamOAuth2Provider! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +URL is a String implementing the [URL Standard](http://url.spec.whatwg.org/) +""" +scalar Url + +""" +A user is an individual's account. +""" +type User implements Node { + """ + ID of the object. + """ + id: ID! + """ + Username chosen by the user. + """ + username: String! + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the user was locked out. + """ + lockedAt: DateTime + """ + Whether the user can request admin privileges. + """ + canRequestAdmin: Boolean! + """ + Access to the user's Matrix account information. + """ + matrix: MatrixUser! + """ + Get the list of compatibility SSO logins, chronologically sorted + """ + compatSsoLogins( + """ + Returns the elements in the list that come after the cursor. 
+ """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): CompatSsoLoginConnection! + """ + Get the list of compatibility sessions, chronologically sorted + """ + compatSessions( + """ + List only sessions with the given state. + """ + state: SessionState + """ + List only sessions with the given type. + """ + type: CompatSessionType + """ + List only sessions with a last active time is between the given bounds. + """ + lastActive: DateFilter + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): CompatSessionConnection! + """ + Get the list of active browser sessions, chronologically sorted + """ + browserSessions( + """ + List only sessions in the given state. + """ + state: SessionState + """ + List only sessions with a last active time is between the given bounds. + """ + lastActive: DateFilter + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): BrowserSessionConnection! + """ + Get the list of emails, chronologically sorted + """ + emails( + """ + List only emails in the given state. + """ + state: UserEmailState + @deprecated( + reason: "Emails are always confirmed, and have only one state" + ) + """ + Returns the elements in the list that come after the cursor. 
+ """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): UserEmailConnection! + """ + Get the list of OAuth 2.0 sessions, chronologically sorted + """ + oauth2Sessions( + """ + List only sessions in the given state. + """ + state: SessionState + """ + List only sessions for the given client. + """ + client: ID + """ + List only sessions with a last active time is between the given bounds. + """ + lastActive: DateFilter + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): Oauth2SessionConnection! + """ + Get the list of upstream OAuth 2.0 links + """ + upstreamOauth2Links( + """ + Returns the elements in the list that come after the cursor. + """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): UpstreamOAuth2LinkConnection! + """ + Get the list of both compat and OAuth 2.0 sessions, chronologically + sorted + """ + appSessions( + """ + List only sessions in the given state. + """ + state: SessionState + """ + List only sessions for the given device. + """ + device: String + """ + List only sessions with a last active time is between the given bounds. + """ + lastActive: DateFilter + """ + List only sessions for the given session. + """ + browserSession: ID + """ + Returns the elements in the list that come after the cursor. 
+ """ + after: String + """ + Returns the elements in the list that come before the cursor. + """ + before: String + """ + Returns the first *n* elements from the list. + """ + first: Int + """ + Returns the last *n* elements from the list. + """ + last: Int + ): AppSessionConnection! + """ + Check if the user has a password set. + """ + hasPassword: Boolean! +} + +""" +A parsed user agent string +""" +type UserAgent { + """ + The user agent string + """ + raw: String! + """ + The name of the browser + """ + name: String + """ + The version of the browser + """ + version: String + """ + The operating system name + """ + os: String + """ + The operating system version + """ + osVersion: String + """ + The device model + """ + model: String + """ + The device type + """ + deviceType: DeviceType! +} + +type UserConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [UserEdge!]! + """ + A list of nodes. + """ + nodes: [User!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type UserEdge { + """ + The item at the end of the edge + """ + node: User! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +A user email address +""" +type UserEmail implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + Email address + """ + email: String! + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the email address was confirmed. Is `null` if the email was never + verified by the user. + """ + confirmedAt: DateTime @deprecated(reason: "Emails are always confirmed now.") +} + +""" +A email authentication session +""" +type UserEmailAuthentication implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! + """ + When the object was last updated. 
+ """ + completedAt: DateTime + """ + The email address associated with this session + """ + email: String! +} + +type UserEmailConnection { + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + A list of edges. + """ + edges: [UserEmailEdge!]! + """ + A list of nodes. + """ + nodes: [UserEmail!]! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} + +""" +An edge in a connection. +""" +type UserEmailEdge { + """ + The item at the end of the edge + """ + node: UserEmail! + """ + A cursor for use in pagination + """ + cursor: String! +} + +""" +The state of a user email address. +""" +enum UserEmailState { + """ + The email address is pending confirmation. + """ + PENDING + """ + The email address has been confirmed. + """ + CONFIRMED +} + +""" +A recovery ticket +""" +type UserRecoveryTicket implements Node & CreationEvent { + """ + ID of the object. + """ + id: ID! + """ + When the object was created. + """ + createdAt: DateTime! + """ + The status of the ticket + """ + status: UserRecoveryTicketStatus! + """ + The username associated with this ticket + """ + username: String! + """ + The email address associated with this ticket + """ + email: String! +} + +""" +The status of a recovery ticket +""" +enum UserRecoveryTicketStatus { + """ + The ticket is valid + """ + VALID + """ + The ticket has expired + """ + EXPIRED + """ + The ticket has been consumed + """ + CONSUMED +} + +""" +The state of a user. +""" +enum UserState { + """ + The user is active. + """ + ACTIVE + """ + The user is locked. + """ + LOCKED +} + +""" +Represents the current viewer +""" +union Viewer = User | Anonymous + +""" +Represents the current viewer's session +""" +union ViewerSession = BrowserSession | Oauth2Session | Anonymous + +""" +Marks an element of a GraphQL schema as no longer supported.
+""" +directive @deprecated( + reason: String = "No longer supported" +) on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE +""" +Directs the executor to include this field or fragment only when the `if` argument is true. +""" +directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT +""" +Directs the executor to skip this field or fragment when the `if` argument is true. +""" +directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT +""" +Provides a scalar specification URL for specifying the behavior of custom scalar types. +""" +directive @specifiedBy(url: String!) on SCALAR +schema { + query: Query + mutation: Mutation +} diff --git a/matrix-authentication-service/frontend/src/@types/i18next.d.ts b/matrix-authentication-service/frontend/src/@types/i18next.d.ts new file mode 100644 index 00000000..def4ce45 --- /dev/null +++ b/matrix-authentication-service/frontend/src/@types/i18next.d.ts @@ -0,0 +1,19 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import "i18next"; +import type translation from "../../locales/en.json"; + +declare module "i18next" { + interface CustomTypeOptions { + keySeparator: "."; + pluralSeparator: ":"; + defaultNS: "translation"; + resources: { + translation: typeof translation; + }; + } +} diff --git a/matrix-authentication-service/frontend/src/components/AccountDeleteButton.tsx b/matrix-authentication-service/frontend/src/components/AccountDeleteButton.tsx new file mode 100644 index 00000000..a5ab3099 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/AccountDeleteButton.tsx @@ -0,0 +1,276 @@ +// Copyright 2025 New Vector Ltd. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { useMutation } from "@tanstack/react-query"; +import IconDelete from "@vector-im/compound-design-tokens/assets/web/icons/delete"; +import { Alert, Avatar, Button, Form, Text } from "@vector-im/compound-web"; +import { useCallback, useEffect, useState } from "react"; +import { Trans, useTranslation } from "react-i18next"; +import { type FragmentType, graphql, useFragment } from "../gql"; +import { graphqlRequest } from "../graphql"; +import * as Dialog from "./Dialog"; +import LoadingSpinner from "./LoadingSpinner"; +import Separator from "./Separator"; + +export const USER_FRAGMENT = graphql(/* GraphQL */ ` + fragment AccountDeleteButton_user on User { + username + hasPassword + matrix { + mxid + displayName + } + } +`); + +export const CONFIG_FRAGMENT = graphql(/* GraphQL */ ` + fragment AccountDeleteButton_siteConfig on SiteConfig { + passwordLoginEnabled + } +`); + +const MUTATION = graphql(/* GraphQL */ ` + mutation DeactivateUser($hsErase: Boolean!, $password: String) { + deactivateUser(input: { hsErase: $hsErase, password: $password }) { + status + } + } +`); + +type Props = { + user: FragmentType; + siteConfig: FragmentType; +}; + +const UserCard: React.FC<{ + mxid: string; + displayName?: string | null; + username: string; +}> = ({ mxid, displayName, username }) => ( +
+ +
+ + {displayName || username} + + + {mxid} + +
+
+); + +const AccountDeleteButton: React.FC = (props) => { + const user = useFragment(USER_FRAGMENT, props.user); + const siteConfig = useFragment(CONFIG_FRAGMENT, props.siteConfig); + const { t } = useTranslation(); + const mutation = useMutation({ + mutationFn: ({ + password, + hsErase, + }: { + password: string | null; + hsErase: boolean; + }) => + graphqlRequest({ + query: MUTATION, + variables: { password, hsErase }, + }), + onSuccess: (data) => { + if (data.deactivateUser.status === "DEACTIVATED") { + window.location.reload(); + } + }, + }); + + // Track if the form may be valid or not, so that we show the alert and enable + // the submit button only when it is + const [isMaybeValid, setIsMaybeValid] = useState(false); + + // We want to *delay* a little bit the submit button being enabled, so that: + // - the user reads the alert + // - *if the password manager autofills the password*, we ignore any auto-submitting of the form + const [allowSubmitting, setAllowSubmitting] = useState(false); + + useEffect(() => { + // If the value of isMaybeValid switches to true, we want to flip + // 'allowSubmitting' to true a little bit later + if (isMaybeValid) { + const timer = setTimeout(() => { + setAllowSubmitting(true); + }, 500); + return () => clearTimeout(timer); + } + + // If it switches to false, we want to flip 'allowSubmitting' to false + // immediately + setAllowSubmitting(false); + }, [isMaybeValid]); + + const onPasswordInput = useCallback( + (e: React.InputEvent) => { + // We don't know if the password is correct, so we consider the form as + // valid if the field is not empty + setIsMaybeValid(e.currentTarget.value !== ""); + }, + [], + ); + + const onMxidInput = useCallback( + (e: React.InputEvent) => { + setIsMaybeValid(e.currentTarget.value === user.matrix.mxid); + }, + [user.matrix.mxid], + ); + + const onSubmit = useCallback( + (e: React.FormEvent) => { + e.preventDefault(); + if (!allowSubmitting) return; + + const data = new 
FormData(e.currentTarget); + const password = data.get("password"); + if (password !== null && typeof password !== "string") throw new Error(); + const hsErase = data.get("hs-erase") === "on"; + + mutation.mutate({ password, hsErase }); + }, + [mutation.mutate, allowSubmitting], + ); + + const incorrectPassword = + mutation.data?.deactivateUser.status === "INCORRECT_PASSWORD"; + + // We still consider the form as submitted if the mutation is pending, or if + // the mutation has returned a success, so that we continue showing the + // loading spinner during the page reload + const isSubmitting = + mutation.isPending || + mutation.data?.deactivateUser.status === "DEACTIVATED"; + + const shouldPromptPassword = + user.hasPassword && siteConfig.passwordLoginEnabled; + + return ( + + {t("frontend.account.delete_account.button")} + + } + > + + {t("frontend.account.delete_account.dialog_title")} + + + + , + list:
    , + item: , + profile: ( + + ), + }} + /> + + + + } name="hs-erase"> + + {t("frontend.account.delete_account.erase_checkbox_label")} + + + + + + {shouldPromptPassword ? ( + + + {t("frontend.account.delete_account.password_label")} + + + + + + {t("frontend.errors.field_required")} + + + {incorrectPassword && ( + + {t("frontend.account.delete_account.incorrect_password")} + + )} + + ) : ( + + + {t("frontend.account.delete_account.mxid_label", { + mxid: user.matrix.mxid, + })} + + + + + + {t("frontend.errors.field_required")} + + + value !== user.matrix.mxid}> + {t("frontend.account.delete_account.mxid_mismatch")} + + + )} + + {isMaybeValid && ( + + {t("frontend.account.delete_account.alert_description")} + + )} + + + + + + + + + ); +}; + +export default AccountDeleteButton; diff --git a/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.module.css b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.module.css new file mode 100644 index 00000000..b2d613a6 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.module.css @@ -0,0 +1,24 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. 
+ */ + +.link { + display: inline-block; + text-decoration: underline; + color: var(--cpd-color-text-primary); + font-weight: var(--cpd-font-weight-medium); + border-radius: var(--cpd-radius-pill-effect); + padding-inline: 0.25rem; +} + +.link:hover { + background: var(--cpd-color-gray-300); +} + +.link:active { + background: var(--cpd-color-text-primary); + color: var(--cpd-color-text-on-solid-primary); +} diff --git a/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.tsx b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.tsx new file mode 100644 index 00000000..c52a61ee --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/AccountManagementPasswordPreview.tsx @@ -0,0 +1,53 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import { Link } from "@tanstack/react-router"; +import { Form } from "@vector-im/compound-web"; +import { useTranslation } from "react-i18next"; + +import { type FragmentType, graphql, useFragment } from "../../gql"; + +import styles from "./AccountManagementPasswordPreview.module.css"; + +export const CONFIG_FRAGMENT = graphql(/* GraphQL */ ` + fragment PasswordChange_siteConfig on SiteConfig { + passwordChangeAllowed + } +`); + +export default function AccountManagementPasswordPreview({ + siteConfig, +}: { + siteConfig: FragmentType; +}): React.ReactElement { + const { t } = useTranslation(); + const { passwordChangeAllowed } = useFragment(CONFIG_FRAGMENT, siteConfig); + + return ( + + + {t("frontend.account.password.label")} + + + + + {passwordChangeAllowed && ( + + {t("frontend.account.password.change")} + + )} + + {!passwordChangeAllowed && + t("frontend.account.password.change_disabled")} + + + + ); +} diff --git a/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/index.ts b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/index.ts new file mode 100644 index 00000000..72342153 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/AccountManagementPasswordPreview/index.ts @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +export { default } from "./AccountManagementPasswordPreview"; diff --git a/matrix-authentication-service/frontend/src/components/BrowserSession.tsx b/matrix-authentication-service/frontend/src/components/BrowserSession.tsx new file mode 100644 index 00000000..e4cf861c --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/BrowserSession.tsx @@ -0,0 +1,135 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import IconChrome from "@browser-logos/chrome/chrome_64x64.png?url"; +import IconFirefox from "@browser-logos/firefox/firefox_64x64.png?url"; +import IconSafari from "@browser-logos/safari/safari_64x64.png?url"; +import { Badge } from "@vector-im/compound-web"; +import { parseISO } from "date-fns"; +import { useTranslation } from "react-i18next"; +import { type FragmentType, graphql, useFragment } from "../gql"; +import DateTime from "./DateTime"; +import EndBrowserSessionButton from "./Session/EndBrowserSessionButton"; +import LastActive from "./Session/LastActive"; +import * as Card from "./SessionCard"; + +const FRAGMENT = graphql(/* GraphQL */ ` + fragment BrowserSession_session on BrowserSession { + id + createdAt + finishedAt + ...EndBrowserSessionButton_session + userAgent { + deviceType + name + os + model + } + lastActiveAt + } +`); + +export const browserLogoUri = (browser?: string): string | undefined => { + const lcBrowser = browser?.toLowerCase(); + + if (lcBrowser?.includes("chrome") || lcBrowser?.includes("chromium")) { + return IconChrome; + } + + if (lcBrowser?.includes("firefox")) { + return IconFirefox; + } + + if (lcBrowser?.includes("safari")) { + return IconSafari; + } +}; + +type Props = { + session: FragmentType; + isCurrent: boolean; +}; + +const BrowserSession: React.FC = ({ session, isCurrent }) => { + const data = useFragment(FRAGMENT, session); + const { t } = useTranslation(); + + const deviceType = data.userAgent?.deviceType ?? 
"UNKNOWN"; + + let deviceName: string | null = null; + let clientName: string | null = null; + + // If we have a model, use that as the device name, and the browser (+ OS) as the client name + if (data.userAgent?.model) { + deviceName = data.userAgent.model; + if (data.userAgent?.name) { + if (data.userAgent?.os) { + clientName = t("frontend.session.name_for_platform", { + name: data.userAgent.name, + platform: data.userAgent.os, + }); + } else { + clientName = data.userAgent.name; + } + } + } else { + // Else use the browser as the device name + deviceName = data.userAgent?.name ?? t("frontend.session.unknown_browser"); + // and if we have an OS, use that as the client name + clientName = data.userAgent?.os ?? null; + } + + const createdAt = parseISO(data.createdAt); + const lastActiveAt = data.lastActiveAt + ? parseISO(data.lastActiveAt) + : undefined; + + return ( + + + + + {clientName && ( + + )} + + + + {lastActiveAt && !isCurrent && ( + + + + )} + + + + + + {isCurrent && ( + + {t("frontend.session.current")} + + )} + + + + {!data.finishedAt && ( + + + + )} + + ); +}; + +export default BrowserSession; diff --git a/matrix-authentication-service/frontend/src/components/ButtonLink.module.css b/matrix-authentication-service/frontend/src/components/ButtonLink.module.css new file mode 100644 index 00000000..82118852 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/ButtonLink.module.css @@ -0,0 +1,13 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. 
+ */ + +/* The weird selector is to have higher specificity than compound-web's button-link */ +a.button-link[href] { + /** This is to undo the following rule in compound-web: + * https://github.com/element-hq/compound-web/blob/6ccb4b6049f3bc8e9739d9452c850ed3c7de49f9/src/components/Button/Button.module.css#L31-L34 + */ + inline-size: initial; +} diff --git a/matrix-authentication-service/frontend/src/components/ButtonLink.tsx b/matrix-authentication-service/frontend/src/components/ButtonLink.tsx new file mode 100644 index 00000000..57362612 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/ButtonLink.tsx @@ -0,0 +1,39 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { createLink } from "@tanstack/react-router"; +import { Button } from "@vector-im/compound-web"; +import cx from "classnames"; +import { forwardRef, type PropsWithChildren } from "react"; +import styles from "./ButtonLink.module.css"; + +type Props = { + kind?: "primary" | "secondary" | "tertiary"; + size?: "sm" | "lg"; + Icon?: React.ComponentType>; + destructive?: boolean; + disabled?: boolean; + className?: string; +} & React.AnchorHTMLAttributes; + +export const ButtonLink = createLink( + forwardRef>( + ({ children, className, ...props }, ref) => { + const disabled = !!props.disabled || !!props["aria-disabled"] || false; + return ( + + ); + }, + ), +); diff --git a/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.test.tsx b/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.test.tsx new file mode 100644 index 00000000..ba3e920a --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.test.tsx @@ -0,0 +1,64 @@ +// Copyright 2024, 2025 New Vector Ltd. 
+// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +// @vitest-environment happy-dom + +import { render } from "@testing-library/react"; +import { describe, expect, it } from "vitest"; + +import { makeFragmentData } from "../../gql/fragment-masking"; + +import OAuth2ClientDetail, { + OAUTH2_CLIENT_FRAGMENT, +} from "./OAuth2ClientDetail"; + +describe("", () => { + const baseClient = { + id: "test-id", + clientId: "client-id", + clientName: "Test Client", + clientUri: "https://client.org/logo.png", + tosUri: "https://client.org/tos", + policyUri: "https://client.org/policy", + redirectUris: ["https://client.org/"], + }; + + it("renders client details", () => { + const data = makeFragmentData(baseClient, OAUTH2_CLIENT_FRAGMENT); + + const { container } = render(); + + expect(container).toMatchSnapshot(); + }); + + it("does not render terms of service when falsy", () => { + const data = makeFragmentData( + { + ...baseClient, + tosUri: undefined, + }, + OAUTH2_CLIENT_FRAGMENT, + ); + + const { queryByText } = render(); + + expect(queryByText("Terms of service")).toBeFalsy(); + }); + + it("does not render logo when logoUri is falsy", () => { + const data = makeFragmentData( + { + ...baseClient, + logoUri: undefined, + }, + OAUTH2_CLIENT_FRAGMENT, + ); + + const { queryByAltText } = render(); + + expect(queryByAltText(baseClient.clientName)).toBeFalsy(); + }); +}); diff --git a/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.tsx b/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.tsx new file mode 100644 index 00000000..0720da56 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Client/OAuth2ClientDetail.tsx @@ -0,0 +1,96 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { H3 } from "@vector-im/compound-web"; +import { useTranslation } from "react-i18next"; + +import { type FragmentType, useFragment } from "../../gql"; +import { graphql } from "../../gql/gql"; +import ExternalLink from "../ExternalLink/ExternalLink"; +import ClientAvatar from "../Session/ClientAvatar"; +import * as Info from "../SessionDetail/SessionInfo"; + +export const OAUTH2_CLIENT_FRAGMENT = graphql(/* GraphQL */ ` + fragment OAuth2Client_detail on Oauth2Client { + id + clientId + clientName + clientUri + logoUri + tosUri + policyUri + redirectUris + } +`); + +type Props = { + client: FragmentType; +}; + +const FriendlyExternalLink: React.FC<{ uri?: string }> = ({ uri }) => { + if (!uri) { + return null; + } + const url = new URL(uri); + const friendlyUrl = url.host + url.pathname; + + return {friendlyUrl}; +}; + +const OAuth2ClientDetail: React.FC = ({ client }) => { + const data = useFragment(OAUTH2_CLIENT_FRAGMENT, client); + const { t } = useTranslation(); + + return ( +
    +
    + +

    {data.clientName}

    +
    + + + {t("frontend.oauth2_client_detail.details_title")} + + + {data.clientName && ( + + + {t("frontend.oauth2_client_detail.name")} + + {data.clientName} + + )} + {data.tosUri && ( + + + {t("frontend.oauth2_client_detail.terms")} + + + + + + )} + {data.policyUri && ( + + + {t("frontend.oauth2_client_detail.policy")} + + + + + + )} + + +
    + ); +}; + +export default OAuth2ClientDetail; diff --git a/matrix-authentication-service/frontend/src/components/Client/__snapshots__/OAuth2ClientDetail.test.tsx.snap b/matrix-authentication-service/frontend/src/components/Client/__snapshots__/OAuth2ClientDetail.test.tsx.snap new file mode 100644 index 00000000..d3a8be19 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Client/__snapshots__/OAuth2ClientDetail.test.tsx.snap @@ -0,0 +1,97 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[` > renders client details 1`] = ` +
    +`; diff --git a/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.module.css b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.module.css new file mode 100644 index 00000000..f8aec80d --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.module.css @@ -0,0 +1,52 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. + */ + +.root { + display: flex; + flex-direction: column; + gap: var(--cpd-space-6x); +} + +.heading { + display: flex; + flex-direction: column; + gap: var(--cpd-space-2x); +} + +.trigger { + display: flex; + width: 100%; + align-items: center; + justify-content: space-between; + text-align: start; + gap: var(--cpd-space-2x); +} + +.trigger-title { + cursor: pointer; + flex-grow: 1; +} + +.trigger-icon { + transition: transform 0.1s ease-out; +} + +.root[data-state="closed"] .trigger-icon { + transform: rotate(180deg); +} + +.description { + color: var(--cpd-color-text-secondary); + font: var(--cpd-font-body-md-regular); + letter-spacing: var(--cpd-font-letter-spacing-body-md); +} + +.content { + display: flex; + flex-direction: column; + gap: var(--cpd-space-6x); +} diff --git a/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.stories.tsx b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.stories.tsx new file mode 100644 index 00000000..95c7a8e7 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.stories.tsx @@ -0,0 +1,29 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import type { Meta, StoryObj } from "@storybook/react-vite"; +import * as Collapsible from "./Collapsible"; + +const meta = { + title: "UI/Collapsible", + component: Collapsible.Section, + tags: ["autodocs"], +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const Basic: Story = { + args: { + title: "Section name", + description: "Optional section description", + children: ( +
    +

    Lorem ipsum dolor sit amet, consectetur adipiscing elit.

    +

    Sed id felis eget orci aliquet tincidunt.

    +
    + ), + }, +}; diff --git a/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.tsx b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.tsx new file mode 100644 index 00000000..67f585c7 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Collapsible/Collapsible.tsx @@ -0,0 +1,72 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import * as Collapsible from "@radix-ui/react-collapsible"; +import IconChevronUp from "@vector-im/compound-design-tokens/assets/web/icons/chevron-up"; +import { H4, IconButton } from "@vector-im/compound-web"; +import classNames from "classnames"; +import { useCallback, useId, useState } from "react"; +import { useTranslation } from "react-i18next"; + +import styles from "./Collapsible.module.css"; + +export const Section: React.FC< + { + title: string; + description?: string; + } & Omit< + React.ComponentProps, + "asChild" | "aria-labelledby" | "aria-describedby" | "open" + > +> = ({ title, description, defaultOpen, className, children, ...props }) => { + const { t } = useTranslation(); + const [open, setOpen] = useState(defaultOpen || false); + const titleId = useId(); + const descriptionId = useId(); + const onClick = useCallback((e: React.MouseEvent) => { + e.preventDefault(); + setOpen((open) => !open); + }, []); + + return ( + +
    +
    +
    +

    + {title} +

    + + + + + +
    + + {description && ( +

    + {description} +

    + )} +
    + + +
    {children}
    +
    +
    +
    + ); +}; diff --git a/matrix-authentication-service/frontend/src/components/Collapsible/index.ts b/matrix-authentication-service/frontend/src/components/Collapsible/index.ts new file mode 100644 index 00000000..c1e0960d --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Collapsible/index.ts @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +export * from "./Collapsible"; diff --git a/matrix-authentication-service/frontend/src/components/CompatSession.test.tsx b/matrix-authentication-service/frontend/src/components/CompatSession.test.tsx new file mode 100644 index 00000000..35430c4c --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/CompatSession.test.tsx @@ -0,0 +1,48 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +// @vitest-environment happy-dom + +import { beforeAll, describe, expect, it } from "vitest"; +import { makeFragmentData } from "../gql"; +import { mockLocale } from "../test-utils/mockLocale"; +import render from "../test-utils/render"; +import CompatSession, { FRAGMENT } from "./CompatSession"; + +describe("", () => { + const baseSession = { + id: "session-id", + deviceId: "abcd1234", + createdAt: "2023-06-29T03:35:17.451292+00:00", + lastActiveIp: "1.2.3.4", + ssoLogin: { + id: "test-id", + redirectUri: "https://element.io/", + }, + }; + + const finishedAt = "2023-06-29T03:35:19.451292+00:00"; + + beforeAll(() => mockLocale()); + + it("renders an active session", () => { + const session = makeFragmentData(baseSession, FRAGMENT); + const { asFragment } = render(); + expect(asFragment()).toMatchSnapshot(); + }); + + it("renders a finished session", () => { + const session = makeFragmentData( + { + ...baseSession, + finishedAt, + }, + FRAGMENT, + ); + const { asFragment } = render(); + expect(asFragment()).toMatchSnapshot(); + }); +}); diff --git a/matrix-authentication-service/frontend/src/components/CompatSession.tsx b/matrix-authentication-service/frontend/src/components/CompatSession.tsx new file mode 100644 index 00000000..c52295ba --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/CompatSession.tsx @@ -0,0 +1,111 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import { parseISO } from "date-fns"; +import { useTranslation } from "react-i18next"; +import { type FragmentType, graphql, useFragment } from "../gql"; +import simplifyUrl from "../utils/simplifyUrl"; +import { browserLogoUri } from "./BrowserSession"; +import DateTime from "./DateTime"; +import EndCompatSessionButton from "./Session/EndCompatSessionButton"; +import LastActive from "./Session/LastActive"; +import * as Card from "./SessionCard"; + +export const FRAGMENT = graphql(/* GraphQL */ ` + fragment CompatSession_session on CompatSession { + id + createdAt + deviceId + finishedAt + lastActiveIp + lastActiveAt + humanName + ...EndCompatSessionButton_session + userAgent { + name + os + model + deviceType + } + ssoLogin { + id + redirectUri + } + } +`); + +const CompatSession: React.FC<{ + session: FragmentType; +}> = ({ session }) => { + const { t } = useTranslation(); + const data = useFragment(FRAGMENT, session); + + const clientName = + data.humanName ?? + (data.ssoLogin?.redirectUri + ? simplifyUrl(data.ssoLogin.redirectUri) + : undefined); + + const deviceType = data.userAgent?.deviceType ?? "UNKNOWN"; + + const deviceName = + data.userAgent?.model ?? + (data.userAgent?.name + ? data.userAgent?.os + ? t("frontend.session.name_for_platform", { + name: data.userAgent.name, + platform: data.userAgent.os, + }) + : data.userAgent.name + : t("frontend.session.unknown_device")); + + const createdAt = parseISO(data.createdAt); + const lastActiveAt = data.lastActiveAt + ? 
parseISO(data.lastActiveAt) + : undefined; + + return ( + + + + + {clientName && ( + + )} + + + + {lastActiveAt && ( + + + + )} + + + + + {data.deviceId} + + + + + {!data.finishedAt && ( + + + + )} + + ); +}; + +export default CompatSession; diff --git a/matrix-authentication-service/frontend/src/components/DateTime.stories.tsx b/matrix-authentication-service/frontend/src/components/DateTime.stories.tsx new file mode 100644 index 00000000..4ed54f7a --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/DateTime.stories.tsx @@ -0,0 +1,59 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { sub } from "date-fns"; + +import DateTime from "./DateTime"; + +const now = new Date(2022, 11, 16, 15, 32, 10); + +const meta = { + title: "UI/DateTime", + component: DateTime, + tags: ["autodocs"], + args: { + now, + datetime: sub(now, { minutes: 30 }), + }, + argTypes: { + now: { + control: "date", + }, + datetime: { + control: "date", + }, + }, +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const Basic: Story = {}; + +export const Now: Story = { + args: { + datetime: now, + }, +}; + +export const SecondsAgo: Story = { + args: { + datetime: sub(now, { seconds: 30 }), + }, +}; + +export const MinutesAgo: Story = { + args: { + datetime: sub(now, { minutes: 5 }), + }, +}; + +export const HoursAgo: Story = { + args: { + datetime: sub(now, { hours: 5 }), + }, +}; diff --git a/matrix-authentication-service/frontend/src/components/DateTime.tsx b/matrix-authentication-service/frontend/src/components/DateTime.tsx new file mode 100644 index 00000000..bd7e904e --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/DateTime.tsx @@ -0,0 +1,58 @@ +// 
Copyright 2024, 2025 New Vector Ltd. +// Copyright 2022-2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { + differenceInHours, + formatISO, + intlFormat, + intlFormatDistance, + parseISO, +} from "date-fns"; + +type Props = { + className?: string; + datetime: Date | string; + now?: Date; +}; + +export const formatDate = (datetime: Date): string => + intlFormat(datetime, { + year: "numeric", + month: "short", + day: "numeric", + weekday: "short", + hour: "numeric", + minute: "numeric", + }); + +/** + * Formats a datetime + * Uses distance when less than an hour ago + * Else internationalised `Fri, 21 Jul 2023, 16:14` + */ +export const formatReadableDate = (datetime: Date, now: Date): string => + Math.abs(differenceInHours(now, datetime, { roundingMethod: "round" })) > 1 + ? formatDate(datetime) + : intlFormatDistance(datetime, now); + +const DateTime: React.FC = ({ + datetime: datetimeProps, + now: nowProps, + className, +}) => { + const datetime = + typeof datetimeProps === "string" ? parseISO(datetimeProps) : datetimeProps; + const now = nowProps || new Date(); + const text = formatReadableDate(datetime, now); + + return ( + + ); +}; + +export default DateTime; diff --git a/matrix-authentication-service/frontend/src/components/Dialog/Dialog.module.css b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.module.css new file mode 100644 index 00000000..eceb0c3b --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.module.css @@ -0,0 +1,134 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. 
+ */ + +.overlay, +.scroll-container { + position: fixed; + inset: 0; + background: rgba(3 12 27 / 52.8%); +} + +.scroll-container { + overflow-y: auto; +} + +.container { + display: flex; + align-items: center; + justify-content: center; + min-height: 100svh; +} + +.dialog { + /* To position the close icon */ + position: relative; + margin: var(--cpd-space-4x); + min-width: 0; + flex: 0 1 520px; +} + +.body { + display: flex; + flex-direction: column; + gap: var(--cpd-space-4x); + background: var(--cpd-color-bg-canvas-default); +} + +.title { + font: var(--cpd-font-heading-md-semibold); + letter-spacing: var(--cpd-font-letter-spacing-heading-md); + color: var(--cpd-color-text-primary); + margin-block-start: var(--cpd-space-4x); +} + +.dialog .body { + padding: var(--cpd-space-4x) var(--cpd-space-10x) var(--cpd-space-10x); +} + +.dialog .title { + /* This adds a padding to the title to make sure it overflows correctly + * and not behind the close button */ + padding-inline-end: var(--cpd-space-7x); +} + +.dialog .close { + position: absolute; + inset-block-start: var(--cpd-space-7x); + inset-inline-end: var(--cpd-space-7x); + padding: var(--cpd-space-1x); + border-radius: var(--cpd-radius-pill-effect); + background: var(--cpd-color-bg-subtle-secondary); + color: var(--cpd-color-icon-secondary); + + & > svg { + inline-size: var(--cpd-space-5x); + block-size: var(--cpd-space-5x); + } + + &:hover { + background: var(--cpd-color-bg-subtle-primary); + color: var(--cpd-color-icon-primary); + } +} + +.drawer { + position: fixed; + background: var(--cpd-color-bg-canvas-default); + inset-block-end: 0; + inset-inline: 0; + + /* Cap the inline content size at 520px, filling the rest of the space with + padding */ + padding-inline: max(0px, calc((100% - 520px) / 2)); + border-start-start-radius: var(--border-radius); + border-start-end-radius: var(--border-radius); + display: flex; + flex-direction: column; + + /* Make sure the border radius cuts the content */ + overflow: 
hidden; + contain: paint; + + /* Cap the block size */ + max-block-size: calc(100vh - var(--cpd-space-4x)); + /* biome-ignore lint/suspicious/noDuplicateProperties: this isn't a real duplicate */ + max-block-size: calc(100svh - var(--cpd-space-4x)); + + /* Drawer comes in the Android style by default */ + --border-radius: 28px; + --handle-block-size: 4px; + --handle-inline-size: 32px; + --handle-inset-block-start: var(--cpd-space-4x); + --handle-inset-block-end: 0px; +} + +.drawer[data-platform="ios"] { + --border-radius: 10px; + --handle-block-size: 5px; + --handle-inline-size: 36px; + --handle-inset-block-start: var(--cpd-space-1-5x); + --handle-inset-block-end: 1px; +} + +.drawer .body { + overflow-y: auto; + scrollbar-width: none; + padding-inline: var(--cpd-space-4x); + margin-block-start: var(--cpd-space-2x); + padding-block-start: var(--cpd-space-4x); + padding-block-end: var(--cpd-space-12x); +} + +.drawer .handle { + align-self: center; + block-size: var(--handle-block-size); + inline-size: var(--handle-inline-size); + margin-block-start: var(--handle-inset-block-start); + margin-block-end: var(--handle-inset-block-end); + background: var(--cpd-color-icon-secondary); + border-radius: var(--cpd-radius-pill-effect); +} diff --git a/matrix-authentication-service/frontend/src/components/Dialog/Dialog.stories.tsx b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.stories.tsx new file mode 100644 index 00000000..a1732a71 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.stories.tsx @@ -0,0 +1,55 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; + +import { Description, Dialog, Title } from "./Dialog"; + +const Template: React.FC<{ + title: string; + description: string; + asDrawer: boolean; + open: boolean; + onOpenChange: (open: boolean) => void; +}> = ({ title, description, asDrawer, open, onOpenChange }) => ( + + {title && {title}} + {description} + +); + +const meta = { + title: "UI/Dialog", + component: Template, + tags: ["autodocs"], + args: { + open: true, + title: "Title", + description: "Description", + asDrawer: false, + onOpenChange: action("onOpenChange"), + }, + argTypes: { + open: { control: "boolean" }, + title: { control: "text" }, + description: { control: "text" }, + asDrawer: { control: "boolean" }, + onOpenChange: { action: "onOpenChange" }, + }, +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const Basic: Story = {}; + +export const LongText: Story = { + args: { + description: + "Lorem ipsum dolor sit amet, officia excepteur ex fugiat reprehenderit enim labore culpa sint ad nisi Lorem pariatur mollit ex esse exercitation amet. Nisi anim cupidatat excepteur officia. Reprehenderit nostrud nostrud ipsum Lorem est aliquip amet voluptate voluptate dolor minim nulla est proident. Nostrud officia pariatur ut officia. Sit irure elit esse ea nulla sunt ex occaecat reprehenderit commodo officia dolor Lorem duis laboris cupidatat officia voluptate. Culpa proident adipisicing id nulla nisi laboris ex in Lorem sunt duis officia eiusmod. Aliqua reprehenderit commodo ex non excepteur duis sunt velit enim. 
Voluptate laboris sint cupidatat ullamco ut ea consectetur et est culpa et culpa duis.", + }, +}; diff --git a/matrix-authentication-service/frontend/src/components/Dialog/Dialog.tsx b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.tsx new file mode 100644 index 00000000..45dba57a --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Dialog/Dialog.tsx @@ -0,0 +1,105 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { + Close, + Content as DialogContent, + Overlay as DialogOverlay, + Root as DialogRoot, + Title as DialogTitle, + Portal, + Trigger, +} from "@radix-ui/react-dialog"; +import IconClose from "@vector-im/compound-design-tokens/assets/web/icons/close"; +import { Glass, Tooltip } from "@vector-im/compound-web"; +import type { PropsWithChildren } from "react"; +import { useTranslation } from "react-i18next"; +import { Drawer } from "vaul"; + +import styles from "./Dialog.module.css"; + +// The granularity of this value is kind of arbitrary: it distinguishes exactly +// the platforms that this library needs to know about in order to correctly +// implement the designs. 
+let platform: "android" | "ios" | "other" = "other"; + +if (/android/i.test(navigator.userAgent)) { + platform = "android"; + // We include 'Mac' here and double-check for touch support because iPads on + // iOS 13 pretend to be a MacOS desktop +} else if ( + /iPad|iPhone|iPod|Mac/.test(navigator.userAgent) && + "ontouchend" in document +) { + platform = "ios"; +} + +type Props = React.PropsWithChildren<{ + trigger?: React.ReactNode; + open?: boolean; + asDrawer?: boolean; + onOpenChange?: (open: boolean) => void; +}>; + +export const Dialog: React.FC = ({ + trigger, + open, + asDrawer, + onOpenChange, + children, +}) => { + if (typeof asDrawer !== "boolean") { + asDrawer = platform !== "other"; + } + + const { t } = useTranslation(); + + if (asDrawer) { + return ( + + {trigger && {trigger}} + + + + +
    {children}
    +
    +
    +
    + ); + } + + return ( + + {trigger && {trigger}} + + {/* This container has a fixed position and scrolls over the Y axis if needed */} + + {/* This container is used as a flexbox parent to center the dialog */} +
    + + + {children} + + + + + + + + +
    +
    +
    +
    + ); +}; + +export const Title: React.FC = ({ children }) => ( + {children} +); + +export { Close, Description } from "@radix-ui/react-dialog"; diff --git a/matrix-authentication-service/frontend/src/components/Dialog/index.ts b/matrix-authentication-service/frontend/src/components/Dialog/index.ts new file mode 100644 index 00000000..73467a18 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Dialog/index.ts @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +export { Close, Description, Dialog, Title } from "./Dialog"; diff --git a/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.module.css b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.module.css new file mode 100644 index 00000000..e83a56df --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.module.css @@ -0,0 +1,17 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. 
+ */ + +.empty-state { + display: flex; + flex-direction: column; + gap: var(--cpd-space-2x); + padding: var(--cpd-space-4x); + background: var(--cpd-color-gray-200); + color: var(--cpd-color-text-secondary); + font: var(--cpd-font-body-sm-regular); + letter-spacing: var(--cpd-font-letter-spacing-body-sm); +} diff --git a/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.stories.tsx b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.stories.tsx new file mode 100644 index 00000000..33b32b3a --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.stories.tsx @@ -0,0 +1,27 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import type { Meta, StoryObj } from "@storybook/react-vite"; + +import { EmptyState } from "./EmptyState"; + +const meta = { + title: "UI/EmptyState", + component: EmptyState, + tags: ["autodocs"], + args: { + children: "No results", + }, +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const Basic: Story = { + args: { + children: "No results", + }, +}; diff --git a/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.tsx b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.tsx new file mode 100644 index 00000000..b40e3e88 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/EmptyState/EmptyState.tsx @@ -0,0 +1,25 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import classNames from "classnames"; +import { forwardRef } from "react"; + +import styles from "./EmptyState.module.css"; + +/** + * A component to display a message when a list is empty + */ +export const EmptyState = forwardRef< + HTMLDivElement, + React.ComponentPropsWithoutRef<"div"> +>(function EmptyState({ children, ...props }, ref) { + const className = classNames(styles.emptyState, props.className); + return ( +
    + {children} +
    + ); +}); diff --git a/matrix-authentication-service/frontend/src/components/EmptyState/index.ts b/matrix-authentication-service/frontend/src/components/EmptyState/index.ts new file mode 100644 index 00000000..6acdbdfd --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/EmptyState/index.ts @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +export { EmptyState as default } from "./EmptyState"; diff --git a/matrix-authentication-service/frontend/src/components/ErrorBoundary.tsx b/matrix-authentication-service/frontend/src/components/ErrorBoundary.tsx new file mode 100644 index 00000000..ad460f87 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/ErrorBoundary.tsx @@ -0,0 +1,57 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { type ErrorInfo, PureComponent, type ReactNode } from "react"; + +import GenericError from "./GenericError"; +import Layout from "./Layout"; + +interface Props { + children: ReactNode; +} + +interface IState { + error?: Error; +} + +/** + * This error boundary component can be used to wrap large content areas and + * catch exceptions during rendering in the component tree below them. + */ +export default class ErrorBoundary extends PureComponent { + public constructor(props: Props) { + super(props); + + this.state = {}; + } + + public static getDerivedStateFromError(error: Error): Partial { + // Side effects are not permitted here, so we only update the state so + // that the next render shows an error message. 
+ return { error }; + } + + public componentDidCatch(error: Error, { componentStack }: ErrorInfo): void { + console.error(error); + console.error( + "The above error occurred while React was rendering the following components:", + componentStack, + ); + } + + public render(): ReactNode { + if (this.state.error) { + // We ask the child components not to suspend, as this error boundary won't be in a Suspense boundary. + return ( + + + + ); + } + + return this.props.children; + } +} diff --git a/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.module.css b/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.module.css new file mode 100644 index 00000000..2d02ce33 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.module.css @@ -0,0 +1,11 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2023, 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. + */ + +.external-link { + /* override compound style */ + color: var(--cpd-color-text-link-external) !important; +} diff --git a/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.tsx b/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.tsx new file mode 100644 index 00000000..7b75891f --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/ExternalLink/ExternalLink.tsx @@ -0,0 +1,26 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import { Link } from "@vector-im/compound-web"; +import classNames from "classnames"; + +import styles from "./ExternalLink.module.css"; + +const ExternalLink: React.FC> = ({ + children, + className, + ...props +}) => ( + + {children} + +); + +export default ExternalLink; diff --git a/matrix-authentication-service/frontend/src/components/Filter/Filter.module.css b/matrix-authentication-service/frontend/src/components/Filter/Filter.module.css new file mode 100644 index 00000000..6bd2e392 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Filter/Filter.module.css @@ -0,0 +1,51 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. + */ + +.filter { + display: flex; + align-items: center; + gap: var(--cpd-space-2x); + font: var(--cpd-font-body-xs-regular); + letter-spacing: var(--cpd-font-letter-spacing-body-xs); + padding: var(--cpd-space-2x) var(--cpd-space-3x); + border-radius: var(--cpd-radius-pill-effect); +} + +.enabled-filter { + background: var(--cpd-color-bg-action-primary-rest); + color: var(--cpd-color-text-on-solid-primary); + + & > .close-icon { + height: var(--cpd-space-4x); + width: var(--cpd-space-4x); + opacity: 0.5; + } + + &:hover { + background: var(--cpd-color-bg-action-primary-hovered); + & > .close-icon { + opacity: 1; + } + } + + &:active { + background: var(--cpd-color-bg-action-primary-rest); + & > .close-icon { + opacity: 1; + } + } +} + +.disabled-filter { + color: var(--cpd-color-text-action-primary); + background: var(--cpd-color-bg-canvas-default); + outline: 1px solid var(--cpd-color-border-interactive-secondary); + + &:hover { + background: var(--cpd-color-bg-subtle-secondary); + } +} diff --git a/matrix-authentication-service/frontend/src/components/Filter/Filter.stories.tsx 
b/matrix-authentication-service/frontend/src/components/Filter/Filter.stories.tsx new file mode 100644 index 00000000..a9d8fa3d --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Filter/Filter.stories.tsx @@ -0,0 +1,41 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import type { Meta, StoryObj } from "@storybook/react-vite"; + +import { Filter } from "./Filter"; + +const meta = { + title: "UI/Filter", + component: Filter, + tags: ["autodocs"], + args: { + children: "Filter", + enabled: false, + }, + decorators: [ + (Story): React.ReactElement => ( +
    + +
    + ), + ], +} satisfies Meta; + +export default meta; +type Story = StoryObj; + +export const Disabled: Story = { + args: { + enabled: false, + }, +}; + +export const Enabled: Story = { + args: { + enabled: true, + }, +}; diff --git a/matrix-authentication-service/frontend/src/components/Filter/Filter.tsx b/matrix-authentication-service/frontend/src/components/Filter/Filter.tsx new file mode 100644 index 00000000..a7c10800 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Filter/Filter.tsx @@ -0,0 +1,39 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +import { createLink } from "@tanstack/react-router"; +import CloseIcon from "@vector-im/compound-design-tokens/assets/web/icons/close"; +import classNames from "classnames"; +import { forwardRef } from "react"; + +import styles from "./Filter.module.css"; + +type Props = React.ComponentPropsWithRef<"a"> & { + enabled?: boolean; +}; + +/** + * A link which looks like a chip used when filtering items + */ +export const Filter = createLink( + forwardRef(function Filter( + { children, enabled, ...props }, + ref, + ) { + const className = classNames( + styles.filter, + enabled ? styles.enabledFilter : styles.disabledFilter, + props.className, + ); + + return ( + + {children} + {enabled && } + + ); + }), +); diff --git a/matrix-authentication-service/frontend/src/components/Filter/index.ts b/matrix-authentication-service/frontend/src/components/Filter/index.ts new file mode 100644 index 00000000..e544ba38 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Filter/index.ts @@ -0,0 +1,7 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2024 The Matrix.org Foundation C.I.C. 
+// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +export { Filter as default } from "./Filter"; diff --git a/matrix-authentication-service/frontend/src/components/Footer/Footer.module.css b/matrix-authentication-service/frontend/src/components/Footer/Footer.module.css new file mode 100644 index 00000000..65b48298 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Footer/Footer.module.css @@ -0,0 +1,33 @@ +/* Copyright 2024, 2025 New Vector Ltd. + * Copyright 2023, 2024 The Matrix.org Foundation C.I.C. + * + * SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial + * Please see LICENSE files in the repository root for full details. + */ + +.legal-footer { + display: flex; + flex-direction: column; + gap: var(--cpd-space-2x); + margin-block-start: auto; + + font: var(--cpd-font-body-sm-regular); + letter-spacing: var(--cpd-font-letter-spacing-body-sm); + + & nav { + display: flex; + gap: var(--cpd-space-2x); + align-items: center; + justify-content: center; + text-align: center; + + & .separator { + color: var(--cpd-color-text-secondary); + } + } + + & .imprint { + color: var(--cpd-color-text-secondary); + text-align: center; + } +} diff --git a/matrix-authentication-service/frontend/src/components/Footer/Footer.stories.tsx b/matrix-authentication-service/frontend/src/components/Footer/Footer.stories.tsx new file mode 100644 index 00000000..82e01a93 --- /dev/null +++ b/matrix-authentication-service/frontend/src/components/Footer/Footer.stories.tsx @@ -0,0 +1,72 @@ +// Copyright 2024, 2025 New Vector Ltd. +// Copyright 2023, 2024 The Matrix.org Foundation C.I.C. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. 
+ +import type { Meta, StoryObj } from "@storybook/react-vite"; + +import { makeFragmentData } from "../../gql"; + +import Footer, { FRAGMENT } from "./Footer"; + +const Template: React.FC<{ + tosUri?: string; + policyUri?: string; + imprint?: string; +}> = ({ tosUri, policyUri, imprint }) => ( +